-rw-r--r--   BUGS                              9
-rw-r--r--   Makefile                          2
-rw-r--r--   README.ddtp                      74
-rw-r--r--   apt-pkg/acquire-item.cc          29
-rw-r--r--   apt-pkg/acquire-item.h          689
-rw-r--r--   apt-pkg/acquire-method.h          9
-rw-r--r--   apt-pkg/acquire-worker.h        259
-rw-r--r--   apt-pkg/acquire.h               541
-rw-r--r--   apt-pkg/algorithms.cc            91
-rw-r--r--   apt-pkg/cacheiterators.h         84
-rw-r--r--   apt-pkg/cdrom.cc                 47
-rw-r--r--   apt-pkg/cdrom.h                   7
-rw-r--r--   apt-pkg/contrib/strutl.cc        43
-rw-r--r--   apt-pkg/contrib/strutl.h          3
-rw-r--r--   apt-pkg/deb/debindexfile.cc     174
-rw-r--r--   apt-pkg/deb/debindexfile.h       30
-rw-r--r--   apt-pkg/deb/deblistparser.cc     43
-rw-r--r--   apt-pkg/deb/deblistparser.h       4
-rw-r--r--   apt-pkg/deb/debmetaindex.cc      17
-rw-r--r--   apt-pkg/deb/debrecords.cc        23
-rw-r--r--   apt-pkg/deb/debrecords.h          2
-rw-r--r--   apt-pkg/deb/dpkgpm.h              2
-rw-r--r--   apt-pkg/depcache.cc             514
-rw-r--r--   apt-pkg/depcache.h              224
-rw-r--r--   apt-pkg/indexcopy.cc            183
-rw-r--r--   apt-pkg/indexcopy.h              13
-rw-r--r--   apt-pkg/indexfile.cc             63
-rw-r--r--   apt-pkg/indexfile.h               7
-rw-r--r--   apt-pkg/init.cc                   5
-rw-r--r--   apt-pkg/init.h                    4
-rw-r--r--   apt-pkg/makefile                  2
-rw-r--r--   apt-pkg/packagemanager.cc        15
-rw-r--r--   apt-pkg/packagemanager.h         32
-rw-r--r--   apt-pkg/pkgcache.cc              32
-rw-r--r--   apt-pkg/pkgcache.h               44
-rw-r--r--   apt-pkg/pkgcachegen.cc          119
-rw-r--r--   apt-pkg/pkgcachegen.h             6
-rw-r--r--   apt-pkg/pkgrecords.cc             9
-rw-r--r--   apt-pkg/pkgrecords.h              2
-rw-r--r--   buildlib/environment.mak.in       2
-rw-r--r--   cmdline/apt-cache.cc            100
-rw-r--r--   cmdline/apt-get.cc               92
-rwxr-xr-x   cmdline/apt-mark                 63
-rw-r--r--   configure.in                     13
-rw-r--r--   debian/changelog                 75
-rwxr-xr-x   debian/rules                      2
-rw-r--r--   doc/Doxyfile.in                1238
-rw-r--r--   doc/apt_preferences.5.xml         2
-rw-r--r--   doc/examples/configure-index     12
-rw-r--r--   doc/makefile                     21
-rw-r--r--   methods/gzip.cc                   2
-rw-r--r--   methods/makefile                  2
52 files changed, 4828 insertions, 252 deletions
diff --git a/BUGS b/BUGS
new file mode 100644
index 000000000..a7b6b1114
--- /dev/null
+++ b/BUGS
@@ -0,0 +1,9 @@
+
+DDTP problems:
+--------------
+- apt-get update should clean the /var/lib/apt/lists dir
+  of all Translation-$index files that are not in the current
+  environment or in the Translations apt variable
+- there needs to be a list of locales (pt, sv, en) that need
+  both language and country code to get the right file
+  (this is in the code in indexfile::LanguageCode(), just a bit ugly)
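The first entry above appears to call for a clean-up pass over the lists directory. Below is a hedged sketch of what such a pass could look like, assuming the pkgIndexFile::LanguageCode() helper mentioned in the entry (added by this commit) and a plain POSIX directory walk; the matching logic is simplified and this code is not part of the patch.

// Illustrative only: remove cached Translation-* lists whose language
// does not match the one APT is currently configured to use.
#include <apt-pkg/indexfile.h>

#include <dirent.h>
#include <cstdio>
#include <string>

void CleanStaleTranslations(const std::string &ListDir)
{
   // e.g. "Translation-de" for a German environment; LanguageCode()
   // is assumed to be the static helper referenced in the BUGS entry.
   std::string Keep = "Translation-" + pkgIndexFile::LanguageCode();

   DIR *D = opendir(ListDir.c_str());
   if (D == 0)
      return;

   for (struct dirent *Ent = readdir(D); Ent != 0; Ent = readdir(D))
   {
      std::string Name = Ent->d_name;
      std::string::size_type Pos = Name.find("Translation-");
      if (Pos == std::string::npos)
         continue;                                  // not a translation list
      if (Name.substr(Pos) != Keep)                 // wrong language -> stale
         std::remove((ListDir + "/" + Name).c_str());
   }
   closedir(D);
}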
diff --git a/Makefile b/Makefile
index 72cac61b5..664caca41 100644
--- a/Makefile
+++ b/Makefile
@@ -26,7 +26,7 @@ maintainer-clean dist-clean distclean pristine sanity: veryclean
# The startup target builds the necessary configure scripts. It should
# be used after a CVS checkout.
-CONVERTED=environment.mak include/config.h include/apti18n.h makefile
+CONVERTED=environment.mak include/config.h include/apti18n.h build/doc/Doxyfile makefile
include buildlib/configure.mak
$(BUILDDIR)/include/config.h: buildlib/config.h.in
$(BUILDDIR)/include/apti18n.h: buildlib/apti18n.h.in
diff --git a/README.ddtp b/README.ddtp
new file mode 100644
index 000000000..98f6109aa
--- /dev/null
+++ b/README.ddtp
@@ -0,0 +1,74 @@
+TODO:
+- URL-Remap for the translation files (to hack around the problem that
+ they are not on any ftp server yet but only on http://ddtp.debian.org/)
+
+Here is the original announcement of the ddtp support:
+
+* To: debian-devel-announce@lists.debian.org
+* Subject: Translate files
+* From: Michael Bramer <grisu@debian.org>
+* Date: Sun, 6 Oct 2002 21:56:06 +0200
+* Mail-followup-to: debian-devel@lists.debian.org
+* Message-id: <20021006195605.GA30516@home.debsupport.de>
+* Old-return-path: <michael@home.debsupport.de>
+* User-agent: Mutt/1.3.28i
+
+Hello all
+
+After some discussion between Anthony Towns (an ftpmaster), Jason
+Gunthorpe (APT developer) and some DDTP coordinators, we found a way
+to transfer the translated package descriptions from the archive to
+the user.
+
+The translated descriptions need to be downloadable before any
+installation process, like the other package meta information. We
+chose one new file per language containing all translated package
+descriptions. The package system can download one or more of these
+files at 'apt-get update' time and thus knows the translations.
+
+The new files are named 'Translate-$lang' and have this
+rfc822 format:
+ Package: <package-name>
+ Description-md5: <the md5 checksum of the english description>
+ Description-$lang.$encoding: <translated headline>
+  <translated section>
+
+The encoding of the Description is normally 'UTF-8' in all languages.
+The files will be located at 'debian/dists/sid/main/i18n/' on the ftp
+server (for all architectures). In addition to the plain
+'Translate-$lang' file, there will be a 'gz' and a 'bz2' version and
+in the future also the new incremental format version.
+
+The <the md5 checksum of the english description> is the md5 checksum
+of the full english description, without the 'Description: ' tag but
+with all spaces and newlines. Look at this example:
+ Description: XXX
+  YYY
+  .
+  ZZZ
+is md5("XXX\n YYY\n .\n ZZZ\n") (perl syntax).
+
+
+A future APT version will download one or more 'Translate-$lang'
+file(s) at 'update' time. After this download it will show a translated
+description instead of the english one, if it finds a translated
+description of the package with the right md5 checksum. The environment
+of the user controls this process (LANG, LANGUAGE, LC_MESSAGES,
+etc). With this the package system will never show an outdated
+translation.
+
+The translations all come from the DDTP. A daily process on
+ddtp.debian.org makes new 'Translate-$lang' files and a script on
+ftp-master requests these files and moves them to the debian archive.
+Now the first files are accessible at
+ http://ddtp.debian.org/pdesc/translatefiles/
+
+If you find wrong translations, please read the guides on
+ddtp.debian.org, make a better translation and send it per mail to
+the DDTP server. Don't bug the package maintainer!
+
+Thanks
+Grisu
+--
+Michael Bramer - a Debian Linux Developer  http://www.debsupport.de
+PGP: finger grisu@db.debian.org -- Linux Sysadmin -- Use Debian Linux
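The checksum rule quoted above is easy to get wrong (the 'Description: ' tag is stripped, but the leading space of every continuation line and the trailing newline are kept), so here is a small sketch of how the value could be computed with apt's own MD5Summation from apt-pkg/contrib/md5.h. It is illustrative only and not part of this commit.

// Sketch: compute the Description-md5 value for an english description.
#include <apt-pkg/md5.h>

#include <iostream>
#include <string>

std::string DescriptionMD5(const std::string &Description)
{
   // Description is the field body without the "Description: " tag,
   // e.g. "XXX\n YYY\n .\n ZZZ" -- continuation lines keep their
   // leading space, and a final newline is appended before hashing.
   MD5Summation Md5;
   Md5.Add((Description + "\n").c_str());
   return Md5.Result().Value();
}

int main()
{
   // Matches the announcement's example: md5("XXX\n YYY\n .\n ZZZ\n")
   std::cout << DescriptionMD5("XXX\n YYY\n .\n ZZZ") << std::endl;
   return 0;
}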
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index acf908ece..9e9f5c2a6 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -730,6 +730,35 @@ void pkgAcqIndex::Done(string Message,unsigned long Size,string MD5,
Mode = decompProg;
}
+// AcqIndexTrans::pkgAcqIndexTrans - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* The Translation file is added to the queue */
+pkgAcqIndexTrans::pkgAcqIndexTrans(pkgAcquire *Owner,
+ string URI,string URIDesc,string ShortDesc) :
+ pkgAcqIndex(Owner, URI, URIDesc, ShortDesc, "", "")
+{
+}
+
+ /*}}}*/
+// AcqIndexTrans::Failed - Silence failure messages for missing files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+void pkgAcqIndexTrans::Failed(string Message,pkgAcquire::MethodConfig *Cnf)
+{
+ if (Cnf->LocalOnly == true ||
+ StringToBool(LookupTag(Message,"Transient-Failure"),false) == false)
+ {
+ // Ignore this
+ Status = StatDone;
+ Complete = false;
+ Dequeue();
+ return;
+ }
+
+ Item::Failed(Message,Cnf);
+}
+ /*}}}*/
+
pkgAcqMetaSig::pkgAcqMetaSig(pkgAcquire *Owner,
string URI,string URIDesc,string ShortDesc,
string MetaIndexURI, string MetaIndexURIDesc,
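As a usage note (a sketch, not code from this commit): a translation index is queued like any other index file, but thanks to the Failed() override above a missing Translation-$lang file is silently tolerated. The URI and description strings below are invented.

#include <apt-pkg/acquire.h>
#include <apt-pkg/acquire-item.h>

void QueueTranslation(pkgAcquire &Fetcher)
{
   // pkgAcqIndexTrans registers itself with the fetcher in its
   // constructor; the fetcher owns the item and deletes it on shutdown.
   new pkgAcqIndexTrans(&Fetcher,
                        "http://ftp.debian.org/debian/dists/sid/main/i18n/Translation-de",
                        "sid/main Translation-de",
                        "Translation-de");
}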
diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h
index 3649d7a03..217ddb3ef 100644
--- a/apt-pkg/acquire-item.h
+++ b/apt-pkg/acquire-item.h
@@ -9,8 +9,8 @@
the Owner Acquire class. Derived classes will then call QueueURI to
register all the URI's they wish to fetch at the initial moment.
- Two item classes are provided to provide functionality for downloading
- of Index files and downloading of Packages.
+ Three item classes are provided to provide functionality for
+ downloading of Index, Translation and Packages files.
A Archive class is provided for downloading .deb files. It does Md5
checking and source location as well as a retry algorithm.
@@ -31,73 +31,283 @@
#pragma interface "apt-pkg/acquire-item.h"
#endif
-// Item to acquire
+/** \addtogroup acquire
+ * @{
+ *
+ * \file acquire-item.h
+ */
+
+/** \brief Represents the process by which a pkgAcquire object should
+ * retrieve a file or a collection of files.
+ *
+ * By convention, Item subclasses should insert themselves into the
+ * acquire queue when they are created by calling QueueURI(), and
+ * remove themselves by calling Dequeue() when either Done() or
+ * Failed() is invoked. Item objects are also responsible for
+ * notifying the download progress indicator (accessible via
+ * #Owner->Log) of their status.
+ *
+ * \see pkgAcquire
+ */
class pkgAcquire::Item
{
protected:
- // Some private helper methods for registering URIs
+ /** \brief The acquire object with which this item is associated. */
pkgAcquire *Owner;
+
+ /** \brief Insert this item into its owner's queue.
+ *
+ * \param ItemDesc Metadata about this item (its URI and
+ * description).
+ */
inline void QueueURI(ItemDesc &Item)
{Owner->Enqueue(Item);};
+
+ /** \brief Remove this item from its owner's queue. */
inline void Dequeue() {Owner->Dequeue(this);};
- // Safe rename function with timestamp preservation
+ /** \brief Rename a file without modifying its timestamp.
+ *
+ * Many item methods call this as their final action.
+ *
+ * \param From The file to be renamed.
+ *
+ * \param To The new name of #From. If #To exists it will be
+ * overwritten.
+ */
void Rename(string From,string To);
public:
- // State of the item
- enum {StatIdle, StatFetching, StatDone, StatError, StatAuthError} Status;
+ /** \brief The current status of this item. */
+ enum ItemState
+ {
+ /** \brief The item is waiting to be downloaded. */
+ StatIdle,
+
+ /** \brief The item is currently being downloaded. */
+ StatFetching,
+
+ /** \brief The item has been successfully downloaded. */
+ StatDone,
+
+ /** \brief An error was encountered while downloading this
+ * item.
+ */
+ StatError,
+
+ /** \brief The item was downloaded but its authenticity could
+ * not be verified.
+ */
+ StatAuthError
+ } Status;
+
+ /** \brief Contains a textual description of the error encountered
+ * if #Status is #StatError or #StatAuthError.
+ */
string ErrorText;
+
+ /** \brief The size of the object to fetch. */
unsigned long FileSize;
- unsigned long PartialSize;
+
+ /** \brief How much of the object was already fetched. */
+ unsigned long PartialSize;
+
+ /** \brief If not \b NULL, contains the name of a subprocess that
+ * is operating on this object (for instance, "gzip" or "gpgv").
+ */
const char *Mode;
+
+ /** \brief A client-supplied unique identifier.
+ *
+   * This field is initialized to 0; it is meant to be filled in by
+ * clients that wish to use it to uniquely identify items.
+ *
+ * \todo it's unused in apt itself
+ */
unsigned long ID;
+
+ /** \brief If \b true, the entire object has been successfully fetched.
+ *
+ * Subclasses should set this to \b true when appropriate.
+ */
bool Complete;
+
+ /** \brief If \b true, the URI of this object is "local".
+ *
+ * The only effect of this field is to exclude the object from the
+ * download progress indicator's overall statistics.
+ */
bool Local;
- // Number of queues we are inserted into
+ /** \brief The number of fetch queues into which this item has been
+ * inserted.
+ *
+ * There is one queue for each source from which an item could be
+ * downloaded.
+ *
+ * \sa pkgAcquire
+ */
unsigned int QueueCounter;
- // File to write the fetch into
+ /** \brief The name of the file into which the retrieved object
+ * will be written.
+ */
string DestFile;
- // Action members invoked by the worker
+ /** \brief Invoked by the acquire worker when the object couldn't
+ * be fetched.
+ *
+ * This is a branch of the continuation of the fetch process.
+ *
+ * \param Message An RFC822-formatted message from the acquire
+ * method describing what went wrong. Use LookupTag() to parse
+ * it.
+ *
+ * \param Cnf The method via which the worker tried to fetch this object.
+ *
+ * \sa pkgAcqMethod
+ */
virtual void Failed(string Message,pkgAcquire::MethodConfig *Cnf);
+
+ /** \brief Invoked by the acquire worker when the object was
+ * fetched successfully.
+ *
+ * Note that the object might \e not have been written to
+ * DestFile; check for the presence of an Alt-Filename entry in
+ * Message to find the file to which it was really written.
+ *
+ * Done is often used to switch from one stage of the processing
+ * to the next (e.g. fetching, unpacking, copying). It is one
+ * branch of the continuation of the fetch process.
+ *
+ * \param Message Data from the acquire method. Use LookupTag()
+ * to parse it.
+ * \param Size The size of the object that was fetched.
+ * \param Md5Hash The MD5Sum of the object that was fetched.
+ * \param Cnf The method via which the object was fetched.
+ *
+ * \sa pkgAcqMethod
+ */
virtual void Done(string Message,unsigned long Size,string Md5Hash,
pkgAcquire::MethodConfig *Cnf);
+
+ /** \brief Invoked when the worker starts to fetch this object.
+ *
+ * \param Message RFC822-formatted data from the worker process.
+ * Use LookupTag() to parse it.
+ *
+ * \param Size The size of the object being fetched.
+ *
+ * \sa pkgAcqMethod
+ */
virtual void Start(string Message,unsigned long Size);
+
+ /** \brief Custom headers to be sent to the fetch process.
+ *
+ * \return a string containing RFC822-style headers that are to be
+ * inserted into the 600 URI Acquire message sent to the fetch
+ * subprocess. The headers are inserted after a newline-less
+ * line, so they should (if nonempty) have a leading newline and
+ * no trailing newline.
+ */
virtual string Custom600Headers() {return string();};
+
+ /** \brief A "descriptive" URI-like string.
+ *
+ * \return a URI that should be used to describe what is being fetched.
+ */
virtual string DescURI() = 0;
+ /** \brief Short item description.
+ *
+ * \return a brief description of the object being fetched.
+ */
virtual string ShortDesc() {return DescURI();}
+
+ /** \brief Invoked by the worker when the download is completely done. */
virtual void Finished() {};
- // Inquire functions
+ /** \brief MD5Sum.
+ *
+ * \return the MD5Sum of this object, if applicable; otherwise, an
+ * empty string.
+ */
virtual string MD5Sum() {return string();};
+
+ /** \return the acquire process with which this item is associated. */
pkgAcquire *GetOwner() {return Owner;};
+
+ /** \return \b true if this object is being fetched from a trusted source. */
virtual bool IsTrusted() {return false;};
-
+
+ /** \brief Initialize an item.
+ *
+ * Adds the item to the list of items known to the acquire
+ * process, but does not place it into any fetch queues (you must
+ * manually invoke QueueURI() to do so).
+ *
+ * Initializes all fields of the item other than Owner to 0,
+ * false, or the empty string.
+ *
+ * \param Owner The new owner of this item.
+ */
Item(pkgAcquire *Owner);
+
+ /** \brief Remove this item from its owner's queue by invoking
+ * pkgAcquire::Remove.
+ */
virtual ~Item();
};
-// item for index diffs
-
+/** \brief Information about an index patch (aka diff). */
struct DiffInfo {
+ /** The filename of the diff. */
string file;
+
+ /** The sha1 hash of the diff. */
string sha1;
+
+ /** The size of the diff. */
unsigned long size;
};
+/** \brief An item that is responsible for fetching an index file of
+ * package list diffs and starting the package list's download.
+ *
+ * This item downloads the Index file and parses it, then enqueues
+ * additional downloads of either the individual patches (using
+ * pkgAcqIndexDiffs) or the entire Packages file (using pkgAcqIndex).
+ *
+ * \sa pkgAcqIndexDiffs, pkgAcqIndex
+ */
class pkgAcqDiffIndex : public pkgAcquire::Item
{
protected:
+ /** \brief If \b true, debugging information will be written to std::clog. */
bool Debug;
+
+ /** \brief The item that is currently being downloaded. */
pkgAcquire::ItemDesc Desc;
+
+ /** \brief The URI of the index file to recreate at our end (either
+ * by downloading it or by applying partial patches).
+ */
string RealURI;
+
+ /** \brief The MD5Sum that the real index file should have after
+ * all patches have been applied.
+ */
string ExpectedMD5;
+
+ /** \brief The index file which will be patched to generate the new
+ * file.
+ */
string CurrentPackagesFile;
+
+ /** \brief A description of the Packages file (stored in
+ * pkgAcquire::ItemDesc::Description).
+ */
string Description;
public:
@@ -108,54 +318,199 @@ class pkgAcqDiffIndex : public pkgAcquire::Item
virtual string DescURI() {return RealURI + "Index";};
virtual string Custom600Headers();
- // helpers
+ /** \brief Parse the Index file for a set of Packages diffs.
+ *
+ * Parses the Index file and creates additional download items as
+ * necessary.
+ *
+ * \param IndexDiffFile The name of the Index file.
+ *
+ * \return \b true if the Index file was successfully parsed, \b
+ * false otherwise.
+ */
bool ParseDiffIndex(string IndexDiffFile);
+
+ /** \brief Create a new pkgAcqDiffIndex.
+ *
+ * \param Owner The Acquire object that owns this item.
+ *
+ * \param URI The URI of the list file to download.
+ *
+ * \param URIDesc A long description of the list file to download.
+ *
+ * \param ShortDesc A short description of the list file to download.
+ *
+ * \param ExpectedMD5 The list file's MD5 signature.
+ */
pkgAcqDiffIndex(pkgAcquire *Owner,string URI,string URIDesc,
- string ShortDesct, string ExpectedMD5);
+ string ShortDesc, string ExpectedMD5);
};
+/** \brief An item that is responsible for fetching all the patches
+ * that need to be applied to a given package index file.
+ *
+ * After downloading and applying a single patch, this item will
+ * enqueue a new pkgAcqIndexDiffs to download and apply the remaining
+ * patches. If no patch can be found that applies to an intermediate
+ * file or if one of the patches cannot be downloaded, falls back to
+ * downloading the entire package index file using pkgAcqIndex.
+ *
+ * \sa pkgAcqDiffIndex, pkgAcqIndex
+ */
class pkgAcqIndexDiffs : public pkgAcquire::Item
{
+ private:
+
+ /** \brief Queue up the next diff download.
+ *
+ * Search for the next available diff that applies to the file
+ * that currently exists on disk, and enqueue it by calling
+ * QueueURI().
+ *
+ * \return \b true if an applicable diff was found, \b false
+ * otherwise.
+ */
+ bool QueueNextDiff();
+
+ /** \brief Handle tasks that must be performed after the item
+ * finishes downloading.
+ *
+ * Dequeues the item and checks the resulting file's md5sum
+ * against ExpectedMD5 after the last patch was applied.
+ * There is no need to check the md5/sha1 after a "normal"
+ * patch because QueueNextDiff() will check the sha1 later.
+ *
+ * \param allDone If \b true, the file was entirely reconstructed,
+ * and its md5sum is verified.
+ */
+ void Finish(bool allDone=false);
+
protected:
+
+ /** \brief If \b true, debugging output will be written to
+ * std::clog.
+ */
bool Debug;
+
+ /** \brief A description of the item that is currently being
+ * downloaded.
+ */
pkgAcquire::ItemDesc Desc;
+
+ /** \brief The URI of the package index file that is being
+ * reconstructed.
+ */
string RealURI;
+
+ /** \brief The MD5Sum of the package index file that is being
+ * reconstructed.
+ */
string ExpectedMD5;
- // this is the SHA-1 sum we expect after the patching
+ /** A description of the file being downloaded. */
string Description;
+
+ /** The patches that remain to be downloaded, including the patch
+ * being downloaded right now. This list should be ordered so
+ * that each diff appears before any diff that depends on it.
+ *
+ * \todo These are indexed by sha1sum; why not use some sort of
+ * dictionary instead of relying on ordering and stripping them
+ * off the front?
+ */
vector<DiffInfo> available_patches;
- enum {StateFetchIndex,StateFetchDiff,StateUnzipDiff,StateApplyDiff} State;
+ /** The current status of this patch. */
+ enum DiffState
+ {
+ /** \brief The diff is in an unknown state. */
+ StateFetchUnkown,
+
+ /** \brief The diff is currently being fetched. */
+ StateFetchDiff,
+
+ /** \brief The diff is currently being uncompressed. */
+ StateUnzipDiff,
+
+ /** \brief The diff is currently being applied. */
+ StateApplyDiff
+ } State;
public:
- // Specialized action members
+ /** \brief Called when the patch file failed to be downloaded.
+ *
+ * This method will fall back to downloading the whole index file
+ * outright; its arguments are ignored.
+ */
virtual void Failed(string Message,pkgAcquire::MethodConfig *Cnf);
+
virtual void Done(string Message,unsigned long Size,string Md5Hash,
pkgAcquire::MethodConfig *Cnf);
virtual string DescURI() {return RealURI + "Index";};
- // various helpers
- bool QueueNextDiff();
- bool ApplyDiff(string PatchFile);
- void Finish(bool allDone=false);
-
+ /** \brief Create an index diff item.
+ *
+ * After filling in its basic fields, this invokes Finish(true) if
+ * #diffs is empty, or QueueNextDiff() otherwise.
+ *
+ * \param Owner The pkgAcquire object that owns this item.
+ *
+ * \param URI The URI of the package index file being
+ * reconstructed.
+ *
+ * \param URIDesc A long description of this item.
+ *
+ * \param ShortDesc A brief description of this item.
+ *
+ * \param ExpectedMD5 The expected md5sum of the completely
+ * reconstructed package index file; the index file will be tested
+ * against this value when it is entirely reconstructed.
+ *
+ * \param diffs The remaining diffs from the index of diffs. They
+ * should be ordered so that each diff appears before any diff
+ * that depends on it.
+ */
pkgAcqIndexDiffs(pkgAcquire *Owner,string URI,string URIDesc,
- string ShortDesct, string ExpectedMD5,
+ string ShortDesc, string ExpectedMD5,
vector<DiffInfo> diffs=vector<DiffInfo>());
};
-// Item class for index files
+/** \brief An acquire item that is responsible for fetching an index
+ * file (e.g., Packages or Sources).
+ *
+ * \sa pkgAcqDiffIndex, pkgAcqIndexDiffs, pkgAcqIndexTrans
+ *
+ * \todo Why does pkgAcqIndex have protected members?
+ */
class pkgAcqIndex : public pkgAcquire::Item
{
protected:
-
+
+ /** \brief If \b true, the index file has been decompressed. */
bool Decompression;
+
+ /** \brief If \b true, the partially downloaded file will be
+ * removed when the download completes.
+ */
bool Erase;
+
+ /** \brief The download request that is currently being
+ * processed.
+ */
pkgAcquire::ItemDesc Desc;
+
+ /** \brief The object that is actually being fetched (minus any
+ * compression-related extensions).
+ */
string RealURI;
+
+ /** \brief The expected md5sum of the decompressed index file. */
string ExpectedMD5;
+
+ /** \brief The compression-related file extension that is being
+ * added to the downloaded file (e.g., ".gz" or ".bz2").
+ */
string CompressionExtension;
public:
@@ -167,26 +522,120 @@ class pkgAcqIndex : public pkgAcquire::Item
virtual string Custom600Headers();
virtual string DescURI() {return RealURI + CompressionExtension;};
+ /** \brief Create a pkgAcqIndex.
+ *
+ * \param Owner The pkgAcquire object with which this item is
+ * associated.
+ *
+ * \param URI The URI of the index file that is to be downloaded.
+ *
+ * \param URIDesc A "URI-style" description of this index file.
+ *
+ * \param ShortDesc A brief description of this index file.
+ *
+ * \param ExpectedMD5 The expected md5sum of this index file.
+ *
+ * \param compressExt The compression-related extension with which
+ * this index file should be downloaded, or "" to autodetect
+ * (".bz2" is used if bzip2 is installed, ".gz" otherwise).
+ */
pkgAcqIndex(pkgAcquire *Owner,string URI,string URIDesc,
- string ShortDesct, string ExpectedMD5, string compressExt="");
+ string ShortDesc, string ExpectedMD5, string compressExt="");
+};
+
+/** \brief An acquire item that is responsible for fetching a
+ * translated index file.
+ *
+ * The only difference from pkgAcqIndex is that transient failures
+ * are suppressed: no error occurs if the translated index file is
+ * missing.
+ */
+class pkgAcqIndexTrans : public pkgAcqIndex
+{
+ public:
+
+ virtual void Failed(string Message,pkgAcquire::MethodConfig *Cnf);
+
+ /** \brief Create a pkgAcqIndexTrans.
+ *
+ * \param Owner The pkgAcquire object with which this item is
+ * associated.
+ *
+ * \param URI The URI of the index file that is to be downloaded.
+ *
+ * \param URIDesc A "URI-style" description of this index file.
+ *
+ * \param ShortDesc A brief description of this index file.
+   */
+ pkgAcqIndexTrans(pkgAcquire *Owner,string URI,string URIDesc,
+ string ShortDesc);
};
+/** \brief Information about an index file. */
struct IndexTarget
{
+ /** \brief A URI from which the index file can be downloaded. */
string URI;
+
+ /** \brief A description of the index file. */
string Description;
+
+ /** \brief A shorter description of the index file. */
string ShortDesc;
+
+ /** \brief The key by which this index file should be
+ * looked up within the meta signature file.
+ */
string MetaKey;
};
-// Item class for index signatures
+/** \brief An acquire item that downloads the detached signature
+ * of a meta-index (Release) file, then queues up the release
+ * file itself.
+ *
+ * \todo Why protected members?
+ *
+ * \sa pkgAcqMetaIndex
+ */
class pkgAcqMetaSig : public pkgAcquire::Item
{
protected:
-
+ /** \brief The fetch request that is currently being processed. */
pkgAcquire::ItemDesc Desc;
- string RealURI,MetaIndexURI,MetaIndexURIDesc,MetaIndexShortDesc;
+
+ /** \brief The URI of the signature file. Unlike Desc.URI, this is
+ * never modified; it is used to determine the file that is being
+ * downloaded.
+ */
+ string RealURI;
+
+ /** \brief The URI of the meta-index file to be fetched after the signature. */
+ string MetaIndexURI;
+
+ /** \brief A "URI-style" description of the meta-index file to be
+ * fetched after the signature.
+ */
+ string MetaIndexURIDesc;
+
+ /** \brief A brief description of the meta-index file to be fetched
+ * after the signature.
+ */
+ string MetaIndexShortDesc;
+
+ /** \brief A package-system-specific parser for the meta-index file. */
indexRecords* MetaIndexParser;
+
+ /** \brief The index files which should be looked up in the meta-index
+ * and then downloaded.
+ *
+ * \todo Why a list of pointers instead of a list of structs?
+ */
const vector<struct IndexTarget*>* IndexTargets;
public:
@@ -198,29 +647,90 @@ class pkgAcqMetaSig : public pkgAcquire::Item
virtual string Custom600Headers();
virtual string DescURI() {return RealURI; };
+ /** \brief Create a new pkgAcqMetaSig. */
pkgAcqMetaSig(pkgAcquire *Owner,string URI,string URIDesc, string ShortDesc,
string MetaIndexURI, string MetaIndexURIDesc, string MetaIndexShortDesc,
const vector<struct IndexTarget*>* IndexTargets,
indexRecords* MetaIndexParser);
};
-// Item class for index signatures
+/** \brief An item that is responsible for downloading the meta-index
+ * file (i.e., Release) itself and verifying its signature.
+ *
+ * Once the download and verification are complete, the downloads of
+ * the individual index files are queued up using pkgAcqDiffIndex.
+ * If the meta-index file had a valid signature, the expected md5sums
+ * of the index files will be the md5sums listed in the meta-index;
+ * otherwise, the expected md5sums will be "" (causing the
+ * authentication of the index files to be bypassed).
+ */
class pkgAcqMetaIndex : public pkgAcquire::Item
{
protected:
-
+ /** \brief The fetch command that is currently being processed. */
pkgAcquire::ItemDesc Desc;
- string RealURI; // FIXME: is this redundant w/ Desc.URI?
+
+ /** \brief The URI that is actually being downloaded; never
+ * modified by pkgAcqMetaIndex.
+ */
+ string RealURI;
+
+ /** \brief The file in which the signature for this index was stored.
+ *
+ * If empty, the signature and the md5sums of the individual
+ * indices will not be checked.
+ */
string SigFile;
+
+ /** \brief The index files to download. */
const vector<struct IndexTarget*>* IndexTargets;
+
+ /** \brief The parser for the meta-index file. */
indexRecords* MetaIndexParser;
+
+ /** \brief If \b true, the index's signature is currently being verified.
+ */
bool AuthPass;
// required to deal gracefully with problems caused by incorrect ims hits
bool IMSHit;
+ /** \brief Check that the release file is a release file for the
+ * correct distribution.
+ *
+ * \return \b true if no fatal errors were encountered.
+ */
bool VerifyVendor(string Message);
+
+ /** \brief Called when a file is finished being retrieved.
+ *
+ * If the file was not downloaded to DestFile, a copy process is
+ * set up to copy it to DestFile; otherwise, Complete is set to \b
+ * true and the file is moved to its final location.
+ *
+ * \param Message The message block received from the fetch
+ * subprocess.
+ */
void RetrievalDone(string Message);
+
+ /** \brief Called when authentication succeeded.
+ *
+ * Sanity-checks the authenticated file, queues up the individual
+ * index files for download, and saves the signature in the lists
+ * directory next to the authenticated list file.
+ *
+ * \param Message The message block received from the fetch
+ * subprocess.
+ */
void AuthDone(string Message);
+
+ /** \brief Starts downloading the individual index files.
+ *
+ * \param verify If \b true, only indices whose expected md5sum
+ * can be determined from the meta-index will be downloaded, and
+ * the md5sums of indices will be checked (reporting
+ * #StatAuthError if there is a mismatch). If verify is \b false,
+ * no md5sum checking will be performed.
+ */
void QueueIndexes(bool verify);
public:
@@ -232,6 +742,7 @@ class pkgAcqMetaIndex : public pkgAcquire::Item
virtual string Custom600Headers();
virtual string DescURI() {return RealURI; };
+ /** \brief Create a new pkgAcqMetaIndex. */
pkgAcqMetaIndex(pkgAcquire *Owner,
string URI,string URIDesc, string ShortDesc,
string SigFile,
@@ -239,28 +750,58 @@ class pkgAcqMetaIndex : public pkgAcquire::Item
indexRecords* MetaIndexParser);
};
-// Item class for archive files
+/** \brief An item that is responsible for fetching a package file.
+ *
+ * If the package file already exists in the cache, nothing will be
+ * done.
+ */
class pkgAcqArchive : public pkgAcquire::Item
{
protected:
-
- // State information for the retry mechanism
+ /** \brief The package version being fetched. */
pkgCache::VerIterator Version;
+
+ /** \brief The fetch command that is currently being processed. */
pkgAcquire::ItemDesc Desc;
+
+ /** \brief The list of sources from which to pick archives to
+ * download this package from.
+ */
pkgSourceList *Sources;
+
+ /** \brief A package records object, used to look up the file
+ * corresponding to each version of the package.
+ */
pkgRecords *Recs;
+
+ /** \brief The md5sum of this package. */
string MD5;
+
+ /** \brief A location in which the actual filename of the package
+ * should be stored.
+ */
string &StoreFilename;
+
+ /** \brief The next file for this version to try to download. */
pkgCache::VerFileIterator Vf;
+
+ /** \brief How many (more) times to try to find a new source from
+ * which to download this package version if it fails.
+ *
+ * Set from Acquire::Retries.
+ */
unsigned int Retries;
+
+ /** \brief \b true if this version file is being downloaded from a
+ * trusted source.
+ */
bool Trusted;
- // Queue the next available file for download.
+ /** \brief Queue up the next available file for this version. */
bool QueueNext();
public:
- // Specialized action members
virtual void Failed(string Message,pkgAcquire::MethodConfig *Cnf);
virtual void Done(string Message,unsigned long Size,string Md5Hash,
pkgAcquire::MethodConfig *Cnf);
@@ -268,18 +809,49 @@ class pkgAcqArchive : public pkgAcquire::Item
virtual string DescURI() {return Desc.URI;};
virtual string ShortDesc() {return Desc.ShortDesc;};
virtual void Finished();
+
virtual bool IsTrusted();
+ /** \brief Create a new pkgAcqArchive.
+ *
+ * \param Owner The pkgAcquire object with which this item is
+ * associated.
+ *
+ * \param Sources The sources from which to download version
+ * files.
+ *
+ * \param Recs A package records object, used to look up the file
+ * corresponding to each version of the package.
+ *
+ * \param Version The package version to download.
+ *
+ * \param StoreFilename A location in which the actual filename of
+ * the package should be stored. It will be set to a guessed
+ * basename in the constructor, and filled in with a fully
+ * qualified filename once the download finishes.
+ */
pkgAcqArchive(pkgAcquire *Owner,pkgSourceList *Sources,
pkgRecords *Recs,pkgCache::VerIterator const &Version,
string &StoreFilename);
};
-// Fetch a generic file to the current directory
+/** \brief Retrieve an arbitrary file to the current directory.
+ *
+ * The file is retrieved even if it is accessed via a URL type that
+ * normally is a NOP, such as "file". If the download fails, the
+ * partial file is renamed to get a ".FAILED" extension.
+ */
class pkgAcqFile : public pkgAcquire::Item
{
+ /** \brief The currently active download process. */
pkgAcquire::ItemDesc Desc;
+
+ /** \brief The md5sum of the file to download, if it is known. */
string Md5Hash;
+
+ /** \brief How many times to retry the download, set from
+ * Acquire::Retries.
+ */
unsigned int Retries;
public:
@@ -291,13 +863,40 @@ class pkgAcqFile : public pkgAcquire::Item
virtual string MD5Sum() {return Md5Hash;};
virtual string DescURI() {return Desc.URI;};
- // If DestFilename is empty, download to DestDir/<basename> if
- // DestDir is non-empty, $CWD/<basename> otherwise. If
- // DestFilename is NOT empty, DestDir is ignored and DestFilename
- // is the absolute name to which the file should be downloaded.
+ /** \brief Create a new pkgAcqFile object.
+ *
+ * \param Owner The pkgAcquire object with which this object is
+ * associated.
+ *
+ * \param URI The URI to download.
+ *
+ * \param MD5 The md5sum of the file to download, if it is known;
+ * otherwise "".
+ *
+ * \param Size The size of the file to download, if it is known;
+ * otherwise 0.
+ *
+ * \param Desc A description of the file being downloaded.
+ *
+ * \param ShortDesc A brief description of the file being
+ * downloaded.
+ *
+ * \param DestDir The directory the file should be downloaded into.
+ *
+ * \param DestFilename The filename+path the file is downloaded to.
+ *
+ *
+ * If DestFilename is empty, download to DestDir/<basename> if
+ * DestDir is non-empty, $CWD/<basename> otherwise. If
+ * DestFilename is NOT empty, DestDir is ignored and DestFilename
+ * is the absolute name to which the file should be downloaded.
+ */
+
pkgAcqFile(pkgAcquire *Owner, string URI, string MD5, unsigned long Size,
string Desc, string ShortDesc,
const string &DestDir="", const string &DestFilename="");
};
+/** @} */
+
#endif
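To illustrate the convention documented at the top of this header (items queue themselves on construction and leave the queue once Done() or Failed() has run), here is a hedged sketch of a minimal Item subclass. The class name, file name and URI are invented, and the base-class calls are assumed to do the usual status bookkeeping.

#include <apt-pkg/acquire.h>
#include <apt-pkg/acquire-item.h>

class pkgAcqExample : public pkgAcquire::Item
{
   pkgAcquire::ItemDesc Desc;

   public:

   virtual string DescURI() {return Desc.URI;};

   virtual void Done(string Message,unsigned long Size,string Md5Hash,
                     pkgAcquire::MethodConfig *Cnf)
   {
      Item::Done(Message,Size,Md5Hash,Cnf);  // base-class bookkeeping/dequeue
      Complete = true;                       // the whole object arrived
   };

   // Failed() is simply inherited here; the base implementation records
   // the error text and removes the item from its queue.

   pkgAcqExample(pkgAcquire *Owner,string URI) : Item(Owner)
   {
      DestFile = "example.dat";              // where the worker should write
      Desc.URI = URI;
      Desc.Description = "Example file";
      Desc.ShortDesc = "example";
      Desc.Owner = this;
      QueueURI(Desc);                        // enter the owner's fetch queue
   };
};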
diff --git a/apt-pkg/acquire-method.h b/apt-pkg/acquire-method.h
index f46209d12..4f08a43ae 100644
--- a/apt-pkg/acquire-method.h
+++ b/apt-pkg/acquire-method.h
@@ -10,6 +10,13 @@
##################################################################### */
/*}}}*/
+
+/** \addtogroup acquire
+ * @{
+ *
+ * \file acquire-method.h
+ */
+
#ifndef PKGLIB_ACQUIRE_METHOD_H
#define PKGLIB_ACQUIRE_METHOD_H
@@ -86,4 +93,6 @@ class pkgAcqMethod
virtual ~pkgAcqMethod() {};
};
+/** @} */
+
#endif
diff --git a/apt-pkg/acquire-worker.h b/apt-pkg/acquire-worker.h
index 6e1952202..1f6bcc05f 100644
--- a/apt-pkg/acquire-worker.h
+++ b/apt-pkg/acquire-worker.h
@@ -9,6 +9,13 @@
##################################################################### */
/*}}}*/
+
+/** \addtogroup acquire
+ * @{
+ *
+ * \file acquire-worker.h
+ */
+
#ifndef PKGLIB_ACQUIRE_WORKER_H
#define PKGLIB_ACQUIRE_WORKER_H
@@ -18,7 +25,25 @@
#pragma interface "apt-pkg/acquire-worker.h"
#endif
-// Interfacing to the method process
+/** \brief A fetch subprocess.
+ *
+ * A worker process is responsible for one stage of the fetch. This
+ * class encapsulates the communications protocol between the master
+ * process and the worker, from the master end.
+ *
+ * Each worker is intrinsically placed on two linked lists. The
+ * Queue list (maintained in the #NextQueue variable) is maintained
+ * by the pkgAcquire::Queue class; it represents the set of workers
+ * assigned to a particular queue. The Acquire list (maintained in
+ * the #NextAcquire variable) is maintained by the pkgAcquire class;
+ * it represents the set of active workers for a particular
+ * pkgAcquire object.
+ *
+ * \todo Like everything else in the Acquire system, this has way too
+ * many protected items.
+ *
+ * \sa pkgAcqMethod, pkgAcquire::Item, pkgAcquire
+ */
class pkgAcquire::Worker
{
friend class pkgAcquire;
@@ -26,64 +51,274 @@ class pkgAcquire::Worker
protected:
friend class Queue;
- /* Linked list starting at a Queue and a linked list starting
- at Acquire */
+ /** \brief The next link on the Queue list.
+ *
+ * \todo This is always NULL; is it just for future use?
+ */
Worker *NextQueue;
+
+ /** \brief The next link on the Acquire list. */
Worker *NextAcquire;
- // The access association
+ /** \brief The Queue with which this worker is associated. */
Queue *OwnerQ;
+
+ /** \brief The download progress indicator to which progress
+ * messages should be sent.
+ */
pkgAcquireStatus *Log;
+
+ /** \brief The configuration of this method. On startup, the
+ * target of this pointer is filled in with basic data about the
+ * method, as reported by the worker.
+ */
MethodConfig *Config;
+
+ /** \brief The access method to be used by this worker.
+ *
+ * \todo Doesn't this duplicate Config->Access?
+ */
string Access;
- // This is the subprocess IPC setup
+ /** \brief The PID of the subprocess. */
pid_t Process;
+
+ /** \brief A file descriptor connected to the standard output of
+ * the subprocess.
+ *
+ * Used to read messages and data from the subprocess.
+ */
int InFd;
+
+ /** \brief A file descriptor connected to the standard input of the
+ * subprocess.
+ *
+ * Used to send commands and configuration data to the subprocess.
+ */
int OutFd;
+
+ /** \brief Set to \b true if the worker is in a state in which it
+ * might generate data or command responses.
+ *
+ * \todo Is this right? It's a guess.
+ */
bool InReady;
+
+ /** \brief Set to \b true if the worker is in a state in which it
+ * is legal to send commands to it.
+ *
+ * \todo Is this right?
+ */
bool OutReady;
- // Various internal things
+ /** If \b true, debugging output will be sent to std::clog. */
bool Debug;
+
+ /** \brief The raw text values of messages received from the
+ * worker, in sequence.
+ */
vector<string> MessageQueue;
+
+ /** \brief Buffers pending writes to the subprocess.
+ *
+   * \todo Wouldn't a std::deque be more appropriate?
+ */
string OutQueue;
- // Private constructor helper
+ /** \brief Common code for the constructor.
+ *
+ * Initializes NextQueue and NextAcquire to NULL; Process, InFd,
+ * and OutFd to -1, OutReady and InReady to \b false, and Debug
+ * from _config.
+ */
void Construct();
- // Message handling things
+ /** \brief Retrieve any available messages from the subprocess.
+ *
+ * The messages are retrieved as in ::ReadMessages(), and
+ * MessageFailure() is invoked if an error occurs; in particular,
+ * if the pipe to the subprocess dies unexpectedly while a message
+ * is being read.
+ *
+ * \return \b true if the messages were successfully read, \b
+ * false otherwise.
+ */
bool ReadMessages();
+
+ /** \brief Parse and dispatch pending messages.
+ *
+ * This dispatches the message in a manner appropriate for its
+ * type.
+ *
+ * \todo Several message types lack separate handlers.
+ *
+ * \sa Capabilities(), SendConfiguration(), MediaChange()
+ */
bool RunMessages();
+
+ /** \brief Read and dispatch any pending messages from the
+ * subprocess.
+ *
+ * \return \b false if the subprocess died unexpectedly while a
+ * message was being transmitted.
+ */
bool InFdReady();
+
+ /** \brief Send any pending commands to the subprocess.
+ *
+ * This method will fail if there is no pending output.
+ *
+   * \return \b true if all commands were sent successfully, \b false if an
+ * error occurred (in which case MethodFailure() will be invoked).
+ */
bool OutFdReady();
- // The message handlers
+ /** \brief Handle a 100 Capabilities response from the subprocess.
+ *
+ * \param Message the raw text of the message from the subprocess.
+ *
+ * The message will be parsed and its contents used to fill
+ * #Config. If #Config is NULL, this routine is a NOP.
+ *
+ * \return \b true.
+ */
bool Capabilities(string Message);
+
+ /** \brief Send a 601 Configuration message (containing the APT
+ * configuration) to the subprocess.
+ *
+   * The APT configuration will be sent to the subprocess in a
+ * message of the following form:
+ *
+ * <pre>
+ * 601 Configuration
+ * Config-Item: Fully-Qualified-Item=Val
+ * Config-Item: Fully-Qualified-Item=Val
+ * ...
+ * </pre>
+ *
+ * \return \b true if the command was successfully sent, \b false
+ * otherwise.
+ */
bool SendConfiguration();
+
+ /** \brief Handle a 403 Media Change message.
+ *
+ * \param Message the raw text of the message; the Media field
+ * indicates what type of media should be changed, and the Drive
+ * field indicates where the media is located.
+ *
+ * Invokes pkgAcquireStatus::MediaChange(Media, Drive) to ask the
+ * user to swap disks; informs the subprocess of the result (via
+ * 603 Media Changed, with the Failed field set to \b true if the
+ * user cancelled the media change).
+ */
bool MediaChange(string Message);
+   /** \brief Invoked when the worker process dies unexpectedly.
+ *
+ * Waits for the subprocess to terminate and generates an error if
+ * it terminated abnormally, then closes and blanks out all file
+ * descriptors. Discards all pending messages from the
+ * subprocess.
+ *
+ * \return \b false.
+ */
bool MethodFailure();
+
+ /** \brief Invoked when a fetch job is completed, either
+ * successfully or unsuccessfully.
+ *
+ * Resets the status information for the worker process.
+ */
void ItemDone();
public:
- // The curent method state
+ /** \brief The queue entry that is currently being downloaded. */
pkgAcquire::Queue::QItem *CurrentItem;
+
+ /** \brief The most recent status string received from the
+ * subprocess.
+ */
string Status;
+
+ /** \brief How many bytes of the file have been downloaded. Zero
+ * if the current progress of the file cannot be determined.
+ */
unsigned long CurrentSize;
+
+ /** \brief The total number of bytes to be downloaded. Zero if the
+   * total size of the file is unknown.
+ */
unsigned long TotalSize;
+
+ /** \brief How much of the file was already downloaded prior to
+ * starting this worker.
+ */
unsigned long ResumePoint;
- // Load the method and do the startup
+ /** \brief Tell the subprocess to download the given item.
+ *
+ * \param Item the item to queue up.
+ * \return \b true if the item was successfully enqueued.
+ *
+ * Queues up a 600 URI Acquire message for the given item to be
+ * sent at the next possible moment. Does \e not flush the output
+ * queue.
+ */
bool QueueItem(pkgAcquire::Queue::QItem *Item);
+
+ /** \brief Start up the worker and fill in #Config.
+ *
+ * Reads the first message from the worker, which is assumed to be
+ * a 100 Capabilities message.
+ *
+ * \return \b true if all operations completed successfully.
+ */
bool Start();
+
+ /** \brief Update the worker statistics (CurrentSize, TotalSize,
+ * etc).
+ */
void Pulse();
+
+ /** \return The fetch method configuration. */
inline const MethodConfig *GetConf() const {return Config;};
-
+
+ /** \brief Create a new Worker to download files.
+ *
+ * \param OwnerQ The queue into which this worker should be
+ * placed.
+ *
+ * \param Config A location in which to store information about
+ * the fetch method.
+ *
+ * \param Log The download progress indicator that should be used
+ * to report the progress of this worker.
+ */
Worker(Queue *OwnerQ,MethodConfig *Config,pkgAcquireStatus *Log);
+
+ /** \brief Create a new Worker that should just retrieve
+ * information about the fetch method.
+ *
+ * Nothing in particular forces you to refrain from actually
+ * downloading stuff, but the various status callbacks won't be
+ * invoked.
+ *
+ * \param Config A location in which to store information about
+ * the fetch method.
+ */
Worker(MethodConfig *Config);
+
+ /** \brief Clean up this worker.
+ *
+ * Closes the file descriptors; if MethodConfig::NeedsCleanup is
+ * \b false, also rudely interrupts the worker with a SIGINT.
+ */
~Worker();
};
+/** @} */
+
#endif
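As a rough illustration of the messages referenced above (100 Capabilities, 600 URI Acquire, 601 Configuration), here is a sketch of how a 600 request could be assembled. The URI and Filename fields follow the documentation; treat the exact field set as an assumption rather than a protocol specification.

#include <string>

// Sketch only: the approximate shape of the request QueueItem() sends.
std::string Build600Message(const std::string &URI,
                            const std::string &DestFile,
                            const std::string &Custom600Headers)
{
   std::string Message = "600 URI Acquire\n";
   Message += "URI: " + URI;
   Message += "\nFilename: " + DestFile;
   Message += Custom600Headers;   // begins with '\n' when non-empty
   Message += "\n\n";             // a blank line terminates the message
   return Message;
}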
diff --git a/apt-pkg/acquire.h b/apt-pkg/acquire.h
index 27bb3d363..64dafdc9d 100644
--- a/apt-pkg/acquire.h
+++ b/apt-pkg/acquire.h
@@ -29,6 +29,40 @@
##################################################################### */
/*}}}*/
+
+/** \defgroup acquire Acquire system
+ *
+ * \brief The Acquire system is responsible for retrieving files from
+ * local or remote URIs and postprocessing them (for instance,
+ * verifying their authenticity). The core class in this system is
+ * pkgAcquire, which is responsible for managing the download queues
+ * during the download. There is at least one download queue for
+ * each supported protocol; protocols such as http may provide one
+ * queue per host.
+ *
+ * Each file to download is represented by a subclass of
+ * pkgAcquire::Item. The files add themselves to the download
+ * queue(s) by providing their URI information to
+ * pkgAcquire::Item::QueueURI, which calls pkgAcquire::Enqueue.
+ *
+ * Once the system is set up, the Run method will spawn subprocesses
+ * to handle the enqueued URIs; the scheduler will then take items
+ * from the queues and feed them into the handlers until the queues
+ * are empty.
+ *
+ * \todo Acquire supports inserting an object into several queues at
+ * once, but it is not clear what its behavior in this case is, and
+ * no subclass of pkgAcquire::Item seems to actually use this
+ * capability.
+ */
+
+/** \addtogroup acquire
+ *
+ * @{
+ *
+ * \file acquire.h
+ */
+
#ifndef PKGLIB_ACQUIRE_H
#define PKGLIB_ACQUIRE_H
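A hedged sketch of the life cycle just described: construct a pkgAcquire, let an item enqueue itself, call Run(), then inspect the items. The URI is a placeholder and the progress object is whatever pkgAcquireStatus implementation the caller provides.

#include <apt-pkg/acquire.h>
#include <apt-pkg/acquire-item.h>
#include <apt-pkg/error.h>

bool FetchOneFile(pkgAcquireStatus &Progress)
{
   pkgAcquire Fetcher(&Progress);

   // pkgAcqFile queues itself in its constructor; Fetcher owns it.
   new pkgAcqFile(&Fetcher,
                  "http://ftp.debian.org/debian/dists/sid/Release",
                  "" /* no known md5 */, 0 /* unknown size */,
                  "sid Release file", "Release");

   if (Fetcher.Run() != pkgAcquire::Continue)
      return false;

   for (pkgAcquire::ItemIterator I = Fetcher.ItemsBegin();
        I != Fetcher.ItemsEnd(); I++)
      if ((*I)->Status != pkgAcquire::Item::StatDone)
         return _error->Error("Failed to fetch %s",(*I)->DescURI().c_str());

   return true;
}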
@@ -46,6 +80,15 @@ using std::string;
#include <unistd.h>
class pkgAcquireStatus;
+
+/** \brief The core download scheduler.
+ *
+ * This class represents an ongoing download. It manages the lists
+ * of active and pending downloads and handles setting up and tearing
+ * down download-related structures.
+ *
+ * \todo Why all the protected data items and methods?
+ */
class pkgAcquire
{
public:
@@ -60,97 +103,299 @@ class pkgAcquire
typedef vector<Item *>::iterator ItemIterator;
typedef vector<Item *>::const_iterator ItemCIterator;
-
+
protected:
- // List of items to fetch
+ /** \brief A list of items to download.
+ *
+ * This is built monotonically as items are created and only
+ * emptied when the download shuts down.
+ */
vector<Item *> Items;
- // List of active queues and fetched method configuration parameters
+ /** \brief The head of the list of active queues.
+ *
+ * \todo why a hand-managed list of queues instead of std::list or
+ * std::set?
+ */
Queue *Queues;
+
+ /** \brief The head of the list of active workers.
+ *
+ * \todo why a hand-managed list of workers instead of std::list
+ * or std::set?
+ */
Worker *Workers;
+
+ /** \brief The head of the list of acquire method configurations.
+ *
+ * Each protocol (http, ftp, gzip, etc) via which files can be
+ * fetched can have a representation in this list. The
+ * configuration data is filled in by parsing the 100 Capabilities
+ * string output by a method on startup (see
+ * pkgAcqMethod::pkgAcqMethod and pkgAcquire::GetConfig).
+ *
+ * \todo why a hand-managed config dictionary instead of std::map?
+ */
MethodConfig *Configs;
+
+ /** \brief The progress indicator for this download. */
pkgAcquireStatus *Log;
+
+ /** \brief The total size of the files which are to be fetched.
+ *
+ * This is not necessarily the total number of bytes to download
+ * when, e.g., download resumption and list updates via patches
+ * are taken into account.
+ */
unsigned long ToFetch;
- // Configurable parameters for the schedular
- enum {QueueHost,QueueAccess} QueueMode;
+ // Configurable parameters for the scheduler
+
+ /** \brief Represents the queuing strategy for remote URIs. */
+ enum QueueStrategy {
+ /** \brief Generate one queue for each protocol/host combination; downloads from
+ * multiple hosts can proceed in parallel.
+ */
+ QueueHost,
+ /** \brief Generate a single queue for each protocol; serialize
+ * downloads from multiple hosts.
+ */
+ QueueAccess} QueueMode;
+
+ /** \brief If \b true, debugging information will be dumped to std::clog. */
bool Debug;
+ /** \brief If \b true, a download is currently in progress. */
bool Running;
-
+
+ /** \brief Add the given item to the list of items. */
void Add(Item *Item);
+
+ /** \brief Remove the given item from the list of items. */
void Remove(Item *Item);
+
+ /** \brief Add the given worker to the list of workers. */
void Add(Worker *Work);
+
+ /** \brief Remove the given worker from the list of workers. */
void Remove(Worker *Work);
+ /** \brief Insert the given fetch request into the appropriate queue.
+ *
+ * \param Item The URI to download and the item to download it
+ * for. Copied by value into the queue; no reference to Item is
+ * retained.
+ */
void Enqueue(ItemDesc &Item);
+
+ /** \brief Remove all fetch requests for this item from all queues. */
void Dequeue(Item *Item);
+
+ /** \brief Determine the fetch method and queue of a URI.
+ *
+ * \param URI The URI to fetch.
+ *
+ * \param[out] Config A location in which to place the method via
+ * which the URI is to be fetched.
+ *
+ * \return the string-name of the queue in which a fetch request
+ * for the given URI should be placed.
+ */
string QueueName(string URI,MethodConfig const *&Config);
- // FDSET managers for derived classes
+ /** \brief Build up the set of file descriptors upon which select() should
+ * block.
+ *
+ * The default implementation inserts the file descriptors
+ * corresponding to active downloads.
+ *
+ * \param[out] Fd The largest file descriptor in the generated sets.
+ *
+ * \param[out] RSet The set of file descriptors that should be
+ * watched for input.
+ *
+ * \param[out] WSet The set of file descriptors that should be
+ * watched for output.
+ */
virtual void SetFds(int &Fd,fd_set *RSet,fd_set *WSet);
+
+ /** Handle input from and output to file descriptors which select()
+ * has determined are ready. The default implementation
+ * dispatches to all active downloads.
+ *
+ * \param RSet The set of file descriptors that are ready for
+ * input.
+ *
+ * \param WSet The set of file descriptors that are ready for
+ * output.
+ */
virtual void RunFds(fd_set *RSet,fd_set *WSet);
- // A queue calls this when it dequeues an item
+ /** \brief Check for idle queues with ready-to-fetch items.
+ *
+ * Called by pkgAcquire::Queue::Done each time an item is dequeued
+ * but remains on some queues; i.e., another queue should start
+ * fetching it.
+ */
void Bump();
public:
+ /** \brief Retrieve information about a fetch method by name.
+ *
+ * \param Access The name of the method to look up.
+ *
+ * \return the method whose name is Access, or \b NULL if no such method exists.
+ */
MethodConfig *GetConfig(string Access);
- enum RunResult {Continue,Failed,Cancelled};
+ /** \brief Provides information on how a download terminated. */
+ enum RunResult {
+ /** \brief All files were fetched successfully. */
+ Continue,
+
+ /** \brief Some files failed to download. */
+ Failed,
+
+ /** \brief The download was cancelled by the user (i.e., #Log's
+ * pkgAcquireStatus::Pulse() method returned \b false).
+ */
+ Cancelled};
- RunResult Run(int PulseIntervall=500000);
+ /** \brief Download all the items that have been Add()ed to this
+ * download process.
+ *
+ * This method will block until the download completes, invoking
+ * methods on #Log to report on the progress of the download.
+ *
+ * \param PulseInterval The method pkgAcquireStatus::Pulse will be
+   * invoked on #Log at intervals of PulseInterval microseconds.
+ *
+ * \return the result of the download.
+ */
+ RunResult Run(int PulseInterval=500000);
+
+ /** \brief Remove all items from this download process, terminate
+ * all download workers, and empty all queues.
+ */
void Shutdown();
- // Simple iteration mechanism
+ /** \brief Get the first #Worker object.
+ *
+ * \return the first active worker in this download process.
+ */
inline Worker *WorkersBegin() {return Workers;};
+
+ /** \brief Advance to the next #Worker object.
+ *
+ * \return the worker immediately following I, or \b NULL if none
+ * exists.
+ */
Worker *WorkerStep(Worker *I);
+
+ /** \brief Get the head of the list of items. */
inline ItemIterator ItemsBegin() {return Items.begin();};
+
+ /** \brief Get the end iterator of the list of items. */
inline ItemIterator ItemsEnd() {return Items.end();};
// Iterate over queued Item URIs
class UriIterator;
+ /** \brief Get the head of the list of enqueued item URIs.
+ *
+ * This iterator will step over every element of every active
+ * queue.
+ */
UriIterator UriBegin();
+ /** \brief Get the end iterator of the list of enqueued item URIs. */
UriIterator UriEnd();
- // Cleans out the download dir
+ /** Deletes each entry in the given directory that is not being
+ * downloaded by this object. For instance, when downloading new
+ * list files, calling Clean() will delete the old ones.
+ *
+ * \param Dir The directory to be cleaned out.
+ *
+ * \return \b true if the directory exists and is readable.
+ */
bool Clean(string Dir);
- // Returns the size of the total download set
+ /** \return the total size in bytes of all the items included in
+ * this download.
+ */
double TotalNeeded();
+
+ /** \return the size in bytes of all non-local items included in
+ * this download.
+ */
double FetchNeeded();
+
+ /** \return the amount of data to be fetched that is already
+ * present on the filesystem.
+ */
double PartialPresent();
+ /** \brief Construct a new pkgAcquire.
+ *
+ * \param Log The progress indicator associated with this
+ * download, or \b NULL for none. This object is not owned by the
+ * download process and will not be deleted when the pkgAcquire
+ * object is destroyed. Naturally, it should live for at least as
+ * long as the pkgAcquire object does.
+ */
pkgAcquire(pkgAcquireStatus *Log = 0);
+
+ /** \brief Destroy this pkgAcquire object.
+ *
+ * Destroys all queue, method, and item objects associated with
+ * this download.
+ */
virtual ~pkgAcquire();
};
-// Description of an Item+URI
+/** \brief Represents a single download source from which an item
+ * should be downloaded.
+ *
+ * An item may have several associated ItemDescs over its lifetime.
+ */
struct pkgAcquire::ItemDesc
{
+ /** \brief The URI from which to download this item. */
string URI;
+   /** \brief A description of this item. */
string Description;
+   /** \brief A shorter description of this item. */
string ShortDesc;
+   /** \brief The underlying item which is to be downloaded. */
Item *Owner;
};
-// List of possible items queued for download.
+/** \brief A single download queue in a pkgAcquire object.
+ *
+ * \todo Why so many protected values?
+ */
class pkgAcquire::Queue
{
friend class pkgAcquire;
friend class pkgAcquire::UriIterator;
friend class pkgAcquire::Worker;
+
+ /** \brief The next queue in the pkgAcquire object's list of queues. */
Queue *Next;
protected:
- // Queued item
+ /** \brief A single item placed in this queue. */
struct QItem : pkgAcquire::ItemDesc
{
- QItem *Next;
+ /** \brief The next item in the queue. */
+ QItem *Next;
+ /** \brief The worker associated with this item, if any. */
pkgAcquire::Worker *Worker;
-
+
+ /** \brief Assign the ItemDesc portion of this QItem from
+ * another ItemDesc
+ */
void operator =(pkgAcquire::ItemDesc const &I)
{
URI = I.URI;
@@ -160,45 +405,141 @@ class pkgAcquire::Queue
};
};
- // Name of the queue
+ /** \brief The name of this queue. */
string Name;
- // Items queued into this queue
+ /** \brief The head of the list of items contained in this queue.
+ *
+ * \todo why a by-hand list instead of an STL structure?
+ */
QItem *Items;
+
+ /** \brief The head of the list of workers associated with this queue.
+ *
+ * \todo This is plural because support exists in Queue for
+ * multiple workers. However, it does not appear that there is
+ * any way to actually associate more than one worker with a
+ * queue.
+ *
+ * \todo Why not just use a std::set?
+ */
pkgAcquire::Worker *Workers;
+
+ /** \brief the download scheduler with which this queue is associated. */
pkgAcquire *Owner;
+
+ /** \brief The number of entries in this queue that are currently
+ * being downloaded.
+ */
signed long PipeDepth;
+
+ /** \brief The maximum number of entries that this queue will
+ * attempt to download at once.
+ */
unsigned long MaxPipeDepth;
public:
- // Put an item into this queue
+ /** \brief Insert the given fetch request into this queue. */
void Enqueue(ItemDesc &Item);
+
+ /** \brief Remove all fetch requests for the given item from this queue.
+ *
+ * \return \b true if at least one request was removed from the queue.
+ */
bool Dequeue(Item *Owner);
- // Find a Queued item
+ /** \brief Locate an item in this queue.
+ *
+ * \param URI A URI to match against.
+ * \param Owner A pkgAcquire::Worker to match against.
+ *
+ * \return the first item in the queue whose URI is #URI and that
+ * is being downloaded by #Owner.
+ */
QItem *FindItem(string URI,pkgAcquire::Worker *Owner);
+
+ /** Presumably this should start downloading an item?
+ *
+ * \todo Unimplemented. Implement it or remove?
+ */
bool ItemStart(QItem *Itm,unsigned long Size);
+
+ /** \brief Remove the given item from this queue and set its state
+ * to pkgAcquire::Item::StatDone.
+ *
+ * If this is the only queue containing the item, the item is also
+ * removed from the main queue by calling pkgAcquire::Dequeue.
+ *
+ * \param Itm The item to remove.
+ *
+ * \return \b true if no errors are encountered.
+ */
bool ItemDone(QItem *Itm);
+ /** \brief Start the worker process associated with this queue.
+ *
+ * If a worker process is already associated with this queue,
+ * this is equivalent to calling Cycle().
+ *
+ * \return \b true if the startup was successful.
+ */
bool Startup();
+
+ /** \brief Shut down the worker process associated with this queue.
+ *
+ * \param Final If \b true, then the process is stopped unconditionally.
+ * Otherwise, it is only stopped if it does not need cleanup
+ * as indicated by the pkgAcqMethod::NeedsCleanup member of
+ * its configuration.
+ *
+ * \return \b true.
+ */
bool Shutdown(bool Final);
+
+ /** \brief Send idle items to the worker process.
+ *
+ * Fills up the pipeline by inserting idle items into the worker's queue.
+ */
bool Cycle();
+
+ /** \brief Check for items that could be enqueued.
+ *
+ * Call this after an item placed in multiple queues has gone from
+ * the pkgAcquire::Item::StatFetching state to the
+ * pkgAcquire::Item::StatIdle state, to possibly refill an empty queue.
+ * This is an alias for Cycle().
+ *
+ * \todo Why both this and Cycle()? Are they expected to be
+ * different someday?
+ */
void Bump();
+ /** \brief Create a new Queue.
+ *
+ * \param Name The name of the new queue.
+ * \param Owner The download process that owns the new queue.
+ */
Queue(string Name,pkgAcquire *Owner);
+
+ /** Shut down all the worker processes associated with this queue
+ * and empty the queue.
+ */
~Queue();
};
+/** \brief Iterates over all the URIs being fetched by a pkgAcquire object. */
class pkgAcquire::UriIterator
{
+ /** The next queue to iterate over. */
pkgAcquire::Queue *CurQ;
+ /** The item that we currently point at. */
pkgAcquire::Queue::QItem *CurItem;
public:
- // Advance to the next item
   inline void operator ++() {operator ++(0);};
+
void operator ++(int)
{
CurItem = CurItem->Next;
@@ -209,11 +550,14 @@ class pkgAcquire::UriIterator
}
};
- // Accessors
inline pkgAcquire::ItemDesc const *operator ->() const {return CurItem;};
inline bool operator !=(UriIterator const &rhs) const {return rhs.CurQ != CurQ || rhs.CurItem != CurItem;};
inline bool operator ==(UriIterator const &rhs) const {return rhs.CurQ == CurQ && rhs.CurItem == CurItem;};
+ /** \brief Create a new UriIterator.
+ *
+ * \param Q The queue over which this UriIterator should iterate.
+ */
UriIterator(pkgAcquire::Queue *Q) : CurQ(Q), CurItem(0)
{
while (CurItem == 0 && CurQ != 0)
@@ -224,61 +568,200 @@ class pkgAcquire::UriIterator
}
};
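In practice this iterator is obtained from pkgAcquire::UriBegin() and compared against pkgAcquire::UriEnd(), documented further up. A brief sketch of dumping every URI scheduled by an already-populated pkgAcquire object; not part of this changeset:

   #include <apt-pkg/acquire.h>
   #include <iostream>

   static void DumpUris(pkgAcquire &Fetcher)
   {
      // Walk every queue of the fetcher via the iterator declared above.
      for (pkgAcquire::UriIterator I = Fetcher.UriBegin(); I != Fetcher.UriEnd(); I++)
         std::cout << I->ShortDesc << " <- " << I->URI << std::endl;
   }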
-// Configuration information from each method
+/** \brief Information about the properties of a single acquire method. */
struct pkgAcquire::MethodConfig
{
+ /** \brief The next link on the acquire method list.
+ *
+ * \todo Why not an STL container?
+ */
MethodConfig *Next;
+ /** \brief The name of this acquire method (e.g., http). */
string Access;
+ /** \brief The implementation version of this acquire method. */
string Version;
+
+ /** \brief If \b true, only one download queue should be created for this
+ * method.
+ */
bool SingleInstance;
+
+ /** \brief If \b true, this method supports pipelined downloading. */
bool Pipeline;
+
+ /** \brief If \b true, the worker process should send the entire
+ * APT configuration tree to the fetch subprocess when it starts
+ * up.
+ */
bool SendConfig;
+
+ /** \brief If \b true, this fetch method does not require network access;
+ * all files are to be acquired from the local disk.
+ */
bool LocalOnly;
+
+ /** \brief If \b true, the subprocess has to carry out some cleanup
+ * actions before shutting down.
+ *
+ * For instance, the cdrom method needs to unmount the CD after it
+ * finishes.
+ */
bool NeedsCleanup;
+
+ /** \brief If \b true, this fetch method acquires files from removable media. */
bool Removable;
+ /** \brief Set up the default method parameters.
+ *
+ * All fields are initialized to NULL, "", or \b false as
+ * appropriate.
+ */
MethodConfig();
};
+/** \brief A monitor object for downloads controlled by the pkgAcquire class.
+ *
+ * \todo Why protected members?
+ *
+ * \todo Should the double members be uint64_t?
+ */
class pkgAcquireStatus
{
protected:
+ /** \brief The last time at which this monitor object was updated. */
struct timeval Time;
+
+ /** \brief The time at which the download started. */
struct timeval StartTime;
+
+ /** \brief The number of bytes fetched as of the previous call to
+ * pkgAcquireStatus::Pulse, including local items.
+ */
double LastBytes;
+
+ /** \brief The current rate of download as of the most recent call
+ * to pkgAcquireStatus::Pulse, in bytes per second.
+ */
double CurrentCPS;
+
+ /** \brief The number of bytes fetched as of the most recent call
+ * to pkgAcquireStatus::Pulse, including local items.
+ */
double CurrentBytes;
+
+ /** \brief The total number of bytes that need to be fetched.
+ *
+ * \warning This member is inaccurate, as new items might be
+ * enqueued while the download is in progress!
+ */
double TotalBytes;
+
+ /** \brief The total number of bytes accounted for by items that
+ * were successfully fetched.
+ */
double FetchedBytes;
+
+ /** \brief The amount of time that has elapsed since the download
+ * started.
+ */
unsigned long ElapsedTime;
+
+ /** \brief The total number of items that need to be fetched.
+ *
+ * \warning This member is inaccurate, as new items might be
+ * enqueued while the download is in progress!
+ */
unsigned long TotalItems;
+
+ /** \brief The number of items that have been successfully downloaded. */
unsigned long CurrentItems;
public:
+ /** \brief If \b true, the download scheduler should call Pulse()
+ * at the next available opportunity.
+ */
bool Update;
+
+ /** \brief If \b true, extra Pulse() invocations will be performed.
+ *
+ * With this option set, Pulse() will be called every time that a
+ * download item starts downloading, finishes downloading, or
+ * terminates with an error.
+ */
bool MorePulses;
- // Called by items when they have finished a real download
+ /** \brief Invoked when a local or remote file has been completely fetched.
+ *
+ * \param Size The size of the file fetched.
+ *
+ * \param ResumePoint How much of the file was already fetched.
+ */
virtual void Fetched(unsigned long Size,unsigned long ResumePoint);
- // Called to change media
+ /** \brief Invoked when the user should be prompted to change the
+ * inserted removable media.
+ *
+ * This method should not return until the user has confirmed to
+ * the user interface that the media change is complete.
+ *
+ * \param Media The name of the media type that should be changed.
+ *
+ * \param Drive The identifying name of the drive whose media
+ * should be changed.
+ *
+ * \return \b true if the user confirms the media change, \b
+ * false if it is cancelled.
+ *
+ * \todo This is a horrible blocking monster; it should be CPSed
+ * with prejudice.
+ */
virtual bool MediaChange(string Media,string Drive) = 0;
- // Each of these is called by the workers when an event occures
+ /** \brief Invoked when an item is confirmed to be up-to-date.
+
+ * For instance, when an HTTP download is informed that the file on
+ * the server was not modified.
+ */
virtual void IMSHit(pkgAcquire::ItemDesc &/*Itm*/) {};
+
+ /** \brief Invoked when some of an item's data is fetched. */
virtual void Fetch(pkgAcquire::ItemDesc &/*Itm*/) {};
+
+ /** \brief Invoked when an item is successfully and completely fetched. */
virtual void Done(pkgAcquire::ItemDesc &/*Itm*/) {};
+
+ /** \brief Invoked when the process of fetching an item encounters
+ * a fatal error.
+ */
virtual void Fail(pkgAcquire::ItemDesc &/*Itm*/) {};
- virtual bool Pulse(pkgAcquire *Owner); // returns false on user cancel
+
+ /** \brief Periodically invoked while the Acquire process is underway.
+ *
+ * Subclasses should first call pkgAcquireStatus::Pulse(), then
+ * update their status output. The download process is blocked
+ * while Pulse() is being called.
+ *
+ * \return \b false if the user asked to cancel the whole Acquire process.
+ *
+ * \see pkgAcquire::Run
+ */
+ virtual bool Pulse(pkgAcquire *Owner);
+
+ /** \brief Invoked when the Acquire process starts running. */
virtual void Start();
+
+ /** \brief Invoked when the Acquire process stops running. */
virtual void Stop();
+ /** \brief Initialize all counters to 0 and the time to the current time. */
pkgAcquireStatus();
virtual ~pkgAcquireStatus() {};
};
+/** @} */
+
#endif
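To make the monitor contract above concrete, here is a minimal sketch of a pkgAcquireStatus subclass; it is not part of this changeset. Only MediaChange() has to be overridden (it is pure virtual), and calling the base-class Pulse() keeps the byte and item counters documented above current. An instance of such a class is what the pkgAcquire(pkgAcquireStatus *Log) constructor expects.

   #include <apt-pkg/acquire.h>
   #include <iostream>

   class SimpleStatus : public pkgAcquireStatus
   {
      public:

      // Refuse media swaps; a real front end would prompt the user here.
      virtual bool MediaChange(string Media, string Drive)
      {
         std::cerr << "media change to " << Media << " in " << Drive
                   << " is not supported" << std::endl;
         return false;
      }

      // Print a one-line progress summary; returning false cancels the run.
      virtual bool Pulse(pkgAcquire *Owner)
      {
         if (pkgAcquireStatus::Pulse(Owner) == false)
            return false;
         std::cout << CurrentBytes << "/" << TotalBytes << " bytes, "
                   << CurrentItems << "/" << TotalItems << " items" << std::endl;
         return true;
      }
   };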
diff --git a/apt-pkg/algorithms.cc b/apt-pkg/algorithms.cc
index 479927d65..ac9d3be0b 100644
--- a/apt-pkg/algorithms.cc
+++ b/apt-pkg/algorithms.cc
@@ -20,10 +20,12 @@
#include <apt-pkg/algorithms.h>
#include <apt-pkg/error.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/version.h>
#include <apt-pkg/sptr.h>
+
#include <apti18n.h>
-
+#include <sys/types.h>
#include <iostream>
/*}}}*/
using namespace std;
@@ -220,6 +222,8 @@ void pkgSimulate::ShortBreaks()
the necessary calculations to deal with the problems. */
bool pkgApplyStatus(pkgDepCache &Cache)
{
+ pkgDepCache::ActionGroup group(Cache);
+
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
{
if (I->VersionList == 0)
@@ -230,13 +234,13 @@ bool pkgApplyStatus(pkgDepCache &Cache)
I->InstState == pkgCache::State::HoldReInstReq)
{
if (I->CurrentVer != 0 && I.CurrentVer().Downloadable() == true)
- Cache.MarkKeep(I);
+ Cache.MarkKeep(I, false, false);
else
{
// Is this right? Will dpkg choke on an upgrade?
if (Cache[I].CandidateVer != 0 &&
Cache[I].CandidateVerIter(Cache).Downloadable() == true)
- Cache.MarkInstall(I);
+ Cache.MarkInstall(I, false, 0, false);
else
return _error->Error(_("The package %s needs to be reinstalled, "
"but I can't find an archive for it."),I.Name());
@@ -253,12 +257,12 @@ bool pkgApplyStatus(pkgDepCache &Cache)
case pkgCache::State::HalfConfigured:
if ((I->CurrentVer != 0 && I.CurrentVer().Downloadable() == true) ||
I.State() != pkgCache::PkgIterator::NeedsUnpack)
- Cache.MarkKeep(I);
+ Cache.MarkKeep(I, false, false);
else
{
if (Cache[I].CandidateVer != 0 &&
Cache[I].CandidateVerIter(Cache).Downloadable() == true)
- Cache.MarkInstall(I);
+ Cache.MarkInstall(I, true, 0, false);
else
Cache.MarkDelete(I);
}
@@ -284,10 +288,12 @@ bool pkgApplyStatus(pkgDepCache &Cache)
on the result. */
bool pkgFixBroken(pkgDepCache &Cache)
{
+ pkgDepCache::ActionGroup group(Cache);
+
// Auto upgrade all broken packages
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
if (Cache[I].NowBroken() == true)
- Cache.MarkInstall(I,true);
+ Cache.MarkInstall(I, true, 0, false);
/* Fix packages that are in a NeedArchive state but don't have a
downloadable install version */
@@ -300,7 +306,7 @@ bool pkgFixBroken(pkgDepCache &Cache)
if (Cache[I].InstVerIter(Cache).Downloadable() == false)
continue;
- Cache.MarkInstall(I,true);
+ Cache.MarkInstall(I, true, 0, false);
}
pkgProblemResolver Fix(&Cache);
@@ -317,23 +323,25 @@ bool pkgFixBroken(pkgDepCache &Cache)
*/
bool pkgDistUpgrade(pkgDepCache &Cache)
{
+ pkgDepCache::ActionGroup group(Cache);
+
/* Auto upgrade all installed packages, this provides the basis
for the installation */
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
if (I->CurrentVer != 0)
- Cache.MarkInstall(I,true);
+ Cache.MarkInstall(I, true, 0, false);
/* Now, auto upgrade all essential packages - this ensures that
the essential packages are present and working */
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
if ((I->Flags & pkgCache::Flag::Essential) == pkgCache::Flag::Essential)
- Cache.MarkInstall(I,true);
+ Cache.MarkInstall(I, true, 0, false);
/* We do it again over all previously installed packages to force
conflict resolution on them all. */
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
if (I->CurrentVer != 0)
- Cache.MarkInstall(I,false);
+ Cache.MarkInstall(I, false, 0, false);
pkgProblemResolver Fix(&Cache);
@@ -345,7 +353,7 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
if (I->SelectedState == pkgCache::State::Hold)
{
Fix.Protect(I);
- Cache.MarkKeep(I);
+ Cache.MarkKeep(I, false, false);
}
}
}
@@ -360,6 +368,8 @@ bool pkgDistUpgrade(pkgDepCache &Cache)
to install packages not marked for install */
bool pkgAllUpgrade(pkgDepCache &Cache)
{
+ pkgDepCache::ActionGroup group(Cache);
+
pkgProblemResolver Fix(&Cache);
if (Cache.BrokenCount() != 0)
@@ -376,7 +386,7 @@ bool pkgAllUpgrade(pkgDepCache &Cache)
continue;
if (I->CurrentVer != 0 && Cache[I].InstallVer != 0)
- Cache.MarkInstall(I,false);
+ Cache.MarkInstall(I, false, 0, false);
}
return Fix.ResolveByKeep();
@@ -389,6 +399,8 @@ bool pkgAllUpgrade(pkgDepCache &Cache)
the package is restored. */
bool pkgMinimizeUpgrade(pkgDepCache &Cache)
{
+ pkgDepCache::ActionGroup group(Cache);
+
if (Cache.BrokenCount() != 0)
return false;
@@ -405,9 +417,9 @@ bool pkgMinimizeUpgrade(pkgDepCache &Cache)
continue;
// Keep it and see if that is OK
- Cache.MarkKeep(I);
+ Cache.MarkKeep(I, false, false);
if (Cache.BrokenCount() != 0)
- Cache.MarkInstall(I,false);
+ Cache.MarkInstall(I, false, 0, false);
else
{
      // If keep didn't actually do anything then there was no change..
@@ -565,6 +577,8 @@ void pkgProblemResolver::MakeScores()
installable */
bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
{
+ pkgDepCache::ActionGroup group(Cache);
+
if ((Flags[Pkg->ID] & Upgradable) == 0 || Cache[Pkg].Upgradable() == false)
return false;
if ((Flags[Pkg->ID] & Protected) == Protected)
@@ -573,7 +587,7 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
Flags[Pkg->ID] &= ~Upgradable;
bool WasKept = Cache[Pkg].Keep();
- Cache.MarkInstall(Pkg,false);
+ Cache.MarkInstall(Pkg, false, 0, false);
// This must be a virtual package or something like that.
if (Cache[Pkg].InstVerIter(Cache).end() == true)
@@ -658,7 +672,7 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
if (Fail == true)
{
if (WasKept == true)
- Cache.MarkKeep(Pkg);
+ Cache.MarkKeep(Pkg, false, false);
else
Cache.MarkDelete(Pkg);
return false;
@@ -685,6 +699,8 @@ bool pkgProblemResolver::DoUpgrade(pkgCache::PkgIterator Pkg)
   upgrade packages to avoid problems. */
bool pkgProblemResolver::Resolve(bool BrokenFix)
{
+ pkgDepCache::ActionGroup group(Cache);
+
unsigned long Size = Cache.Head().PackageCount;
// Record which packages are marked for install
@@ -700,7 +716,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
{
if (Cache[I].InstBroken() == true && BrokenFix == true)
{
- Cache.MarkInstall(I,false);
+ Cache.MarkInstall(I, false, 0, false);
if (Cache[I].Install() == true)
Again = true;
}
@@ -766,14 +782,14 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
pkgCache::Version *OldVer = Cache[I].InstallVer;
Flags[I->ID] &= ReInstateTried;
- Cache.MarkInstall(I,false);
+ Cache.MarkInstall(I, false, 0, false);
if (Cache[I].InstBroken() == true ||
OldBreaks < Cache.BrokenCount())
{
if (OldVer == 0)
Cache.MarkDelete(I);
else
- Cache.MarkKeep(I);
+ Cache.MarkKeep(I, false, false);
}
else
if (Debug == true)
@@ -818,7 +834,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
{
if (Debug == true)
clog << " Or group keep for " << I.Name() << endl;
- Cache.MarkKeep(I);
+ Cache.MarkKeep(I, false, false);
Change = true;
}
}
@@ -868,7 +884,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
}
Change = true;
- Cache.MarkKeep(I);
+ Cache.MarkKeep(I, false, false);
break;
}
@@ -905,7 +921,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
/* See if a keep will do, unless the package is protected,
then installing it will be necessary */
bool Installed = Cache[I].Install();
- Cache.MarkKeep(I);
+ Cache.MarkKeep(I, false, false);
if (Cache[I].InstBroken() == false)
{
// Unwind operation will be keep now
@@ -914,7 +930,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
// Restore
if (InOr == true && Installed == true)
- Cache.MarkInstall(I,false);
+ Cache.MarkInstall(I, false, 0, false);
if (Debug == true)
clog << " Holding Back " << I.Name() << " rather than change " << Start.TargetPkg().Name() << endl;
@@ -986,7 +1002,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
// Restore
if (InOr == true && Installed == true)
- Cache.MarkInstall(I,false);
+ Cache.MarkInstall(I, false, 0, false);
if (Debug == true)
clog << " Holding Back " << I.Name() << " because I can't find " << Start.TargetPkg().Name() << endl;
@@ -1031,7 +1047,7 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
{
if (Debug == true)
clog << " Fixing " << I.Name() << " via keep of " << J->Pkg.Name() << endl;
- Cache.MarkKeep(J->Pkg);
+ Cache.MarkKeep(J->Pkg, false, false);
}
if (Counter > 1)
@@ -1061,6 +1077,20 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
return _error->Error(_("Unable to correct problems, you have held broken packages."));
}
+ // set the auto-flags (mvo: I'm not sure if we _really_ need this, but
+   // I didn't manage
+ pkgCache::PkgIterator I = Cache.PkgBegin();
+ for (;I.end() != true; I++) {
+ if (Cache[I].NewInstall() && !(Flags[I->ID] & PreInstalled)) {
+ if(_config->FindI("Debug::pkgAutoRemove",false)) {
+ std::clog << "Resolve installed new pkg: " << I.Name()
+ << " (now marking it as auto)" << std::endl;
+ }
+ Cache[I].Flags |= pkgCache::Flag::Auto;
+ }
+ }
+
+
return true;
}
/*}}}*/
@@ -1071,6 +1101,8 @@ bool pkgProblemResolver::Resolve(bool BrokenFix)
system was non-broken previously. */
bool pkgProblemResolver::ResolveByKeep()
{
+ pkgDepCache::ActionGroup group(Cache);
+
unsigned long Size = Cache.Head().PackageCount;
if (Debug == true)
@@ -1104,7 +1136,7 @@ bool pkgProblemResolver::ResolveByKeep()
{
if (Debug == true)
clog << "Keeping package " << I.Name() << endl;
- Cache.MarkKeep(I);
+ Cache.MarkKeep(I, false, false);
if (Cache[I].InstBroken() == false)
{
K = PList - 1;
@@ -1152,7 +1184,7 @@ bool pkgProblemResolver::ResolveByKeep()
{
if (Debug == true)
clog << " Keeping Package " << Pkg.Name() << " due to dep" << endl;
- Cache.MarkKeep(Pkg);
+ Cache.MarkKeep(Pkg, false, false);
}
if (Cache[I].InstBroken() == false)
@@ -1189,6 +1221,8 @@ bool pkgProblemResolver::ResolveByKeep()
/* This is used to make sure protected packages are installed */
void pkgProblemResolver::InstallProtect()
{
+ pkgDepCache::ActionGroup group(Cache);
+
for (pkgCache::PkgIterator I = Cache.PkgBegin(); I.end() == false; I++)
{
if ((Flags[I->ID] & Protected) == Protected)
@@ -1196,7 +1230,7 @@ void pkgProblemResolver::InstallProtect()
if ((Flags[I->ID] & ToRemove) == ToRemove)
Cache.MarkDelete(I);
else
- Cache.MarkInstall(I,false);
+ Cache.MarkInstall(I, false, 0, false);
}
}
}
@@ -1232,3 +1266,4 @@ void pkgPrioSortList(pkgCache &Cache,pkgCache::Version **List)
qsort(List,Count,sizeof(*List),PrioComp);
}
/*}}}*/
+
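All of the hunks in this file follow one pattern: each resolver entry point now opens a pkgDepCache::ActionGroup, and the explicit MarkKeep()/MarkInstall() calls pass FromUser = false so they do not disturb the new Auto flag. A condensed sketch of the idiom, with Cache, Pkg and Other as placeholder values; it is not code from the patch:

   #include <apt-pkg/depcache.h>

   static void AdjustSelections(pkgDepCache &Cache,
                                pkgCache::PkgIterator Pkg,
                                pkgCache::PkgIterator Other)
   {
      pkgDepCache::ActionGroup group(Cache);   // defer the auto-mark sweep

      Cache.MarkInstall(Pkg, true, 0, false);  // AutoInst, Depth, FromUser
      Cache.MarkKeep(Other, false, false);     // Soft, FromUser
   }  // group is released here; the outermost group triggers MarkAndSweep()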
diff --git a/apt-pkg/cacheiterators.h b/apt-pkg/cacheiterators.h
index 2b326bd65..d5a9c7b0d 100644
--- a/apt-pkg/cacheiterators.h
+++ b/apt-pkg/cacheiterators.h
@@ -99,7 +99,7 @@ class pkgCache::VerIterator
{
Version *Ver;
pkgCache *Owner;
-
+
void _dummy();
public:
@@ -128,6 +128,8 @@ class pkgCache::VerIterator
inline const char *Section() const {return Ver->Section == 0?0:Owner->StrP + Ver->Section;};
inline const char *Arch() const {return Ver->Arch == 0?0:Owner->StrP + Ver->Arch;};
inline PkgIterator ParentPkg() const {return PkgIterator(*Owner,Owner->PkgP + Ver->ParentPkg);};
+ inline DescIterator DescriptionList() const;
+ DescIterator TranslatedDescription() const;
inline DepIterator DependsList() const;
inline PrvIterator ProvidesList() const;
inline VerFileIterator FileList() const;
@@ -148,6 +150,50 @@ class pkgCache::VerIterator
};
};
+// Description Iterator
+class pkgCache::DescIterator
+{
+ Description *Desc;
+ pkgCache *Owner;
+
+ void _dummy();
+
+ public:
+
+ // Iteration
+ void operator ++(int) {if (Desc != Owner->DescP) Desc = Owner->DescP + Desc->NextDesc;};
+ inline void operator ++() {operator ++(0);};
+ inline bool end() const {return Desc == Owner->DescP?true:false;};
+ inline void operator =(const DescIterator &B) {Desc = B.Desc; Owner = B.Owner;};
+
+ // Comparison
+ inline bool operator ==(const DescIterator &B) const {return Desc == B.Desc;};
+ inline bool operator !=(const DescIterator &B) const {return Desc != B.Desc;};
+ int CompareDesc(const DescIterator &B) const;
+
+ // Accessors
+ inline Description *operator ->() {return Desc;};
+ inline Description const *operator ->() const {return Desc;};
+ inline Description &operator *() {return *Desc;};
+ inline Description const &operator *() const {return *Desc;};
+ inline operator Description *() {return Desc == Owner->DescP?0:Desc;};
+ inline operator Description const *() const {return Desc == Owner->DescP?0:Desc;};
+ inline pkgCache *Cache() {return Owner;};
+
+ inline const char *LanguageCode() const {return Owner->StrP + Desc->language_code;};
+ inline const char *md5() const {return Owner->StrP + Desc->md5sum;};
+ inline DescFileIterator FileList() const;
+ inline unsigned long Index() const {return Desc - Owner->DescP;};
+
+ inline DescIterator() : Desc(0), Owner(0) {};
+ inline DescIterator(pkgCache &Owner,Description *Trg = 0) : Desc(Trg),
+ Owner(&Owner)
+ {
+ if (Desc == 0)
+ Desc = Owner.DescP;
+ };
+};
+
// Dependency iterator
class pkgCache::DepIterator
{
@@ -338,6 +384,38 @@ class pkgCache::VerFileIterator
inline VerFileIterator(pkgCache &Owner,VerFile *Trg) : Owner(&Owner), FileP(Trg) {};
};
+// Description File
+class pkgCache::DescFileIterator
+{
+ pkgCache *Owner;
+ DescFile *FileP;
+
+ public:
+
+ // Iteration
+ void operator ++(int) {if (FileP != Owner->DescFileP) FileP = Owner->DescFileP + FileP->NextFile;};
+ inline void operator ++() {operator ++(0);};
+ inline bool end() const {return FileP == Owner->DescFileP?true:false;};
+
+ // Comparison
+ inline bool operator ==(const DescFileIterator &B) const {return FileP == B.FileP;};
+ inline bool operator !=(const DescFileIterator &B) const {return FileP != B.FileP;};
+
+ // Accessors
+ inline DescFile *operator ->() {return FileP;};
+ inline DescFile const *operator ->() const {return FileP;};
+ inline DescFile const &operator *() const {return *FileP;};
+ inline operator DescFile *() {return FileP == Owner->DescFileP?0:FileP;};
+ inline operator DescFile const *() const {return FileP == Owner->DescFileP?0:FileP;};
+ inline pkgCache *Cache() {return Owner;};
+
+ inline PkgFileIterator File() const {return PkgFileIterator(*Owner,FileP->File + Owner->PkgFileP);};
+ inline unsigned long Index() const {return FileP - Owner->DescFileP;};
+
+ inline DescFileIterator() : Owner(0), FileP(0) {};
+ inline DescFileIterator(pkgCache &Owner,DescFile *Trg) : Owner(&Owner), FileP(Trg) {};
+};
+
// Inlined Begin functions cant be in the class because of order problems
inline pkgCache::VerIterator pkgCache::PkgIterator::VersionList() const
{return VerIterator(*Owner,Owner->VerP + Pkg->VersionList);};
@@ -347,11 +425,15 @@ inline pkgCache::DepIterator pkgCache::PkgIterator::RevDependsList() const
{return DepIterator(*Owner,Owner->DepP + Pkg->RevDepends,Pkg);};
inline pkgCache::PrvIterator pkgCache::PkgIterator::ProvidesList() const
{return PrvIterator(*Owner,Owner->ProvideP + Pkg->ProvidesList,Pkg);};
+inline pkgCache::DescIterator pkgCache::VerIterator::DescriptionList() const
+ {return DescIterator(*Owner,Owner->DescP + Ver->DescriptionList);};
inline pkgCache::PrvIterator pkgCache::VerIterator::ProvidesList() const
{return PrvIterator(*Owner,Owner->ProvideP + Ver->ProvidesList,Ver);};
inline pkgCache::DepIterator pkgCache::VerIterator::DependsList() const
{return DepIterator(*Owner,Owner->DepP + Ver->DependsList,Ver);};
inline pkgCache::VerFileIterator pkgCache::VerIterator::FileList() const
{return VerFileIterator(*Owner,Owner->VerFileP + Ver->FileList);};
+inline pkgCache::DescFileIterator pkgCache::DescIterator::FileList() const
+ {return DescFileIterator(*Owner,Owner->DescFileP + Desc->FileList);};
#endif
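The new iterators mirror the existing version and file iterators, so walking the descriptions attached to a version works the same way as walking its versions. A short sketch, assuming Ver is a valid, non-end VerIterator; not code from this changeset:

   #include <apt-pkg/pkgcache.h>
   #include <apt-pkg/cacheiterators.h>
   #include <iostream>

   static void DumpDescriptions(pkgCache::VerIterator Ver)
   {
      // Every available description, with its language code and checksum.
      for (pkgCache::DescIterator D = Ver.DescriptionList(); D.end() == false; D++)
         std::cout << D.LanguageCode() << " " << D.md5() << std::endl;

      // Or jump straight to the one matching the configured language.
      pkgCache::DescIterator Best = Ver.TranslatedDescription();
      if (Best.end() == false)
         std::cout << "preferred: " << Best.LanguageCode() << std::endl;
   }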
diff --git a/apt-pkg/cdrom.cc b/apt-pkg/cdrom.cc
index ce1beb39b..b42c82dd0 100644
--- a/apt-pkg/cdrom.cc
+++ b/apt-pkg/cdrom.cc
@@ -30,12 +30,16 @@ using namespace std;
   search that short circuits when it hits a package file in the dir.
This speeds it up greatly as the majority of the size is in the
binary-* sub dirs. */
-bool pkgCdrom::FindPackages(string CD,vector<string> &List,
- vector<string> &SList, vector<string> &SigList,
+bool pkgCdrom::FindPackages(string CD,
+ vector<string> &List,
+ vector<string> &SList,
+ vector<string> &SigList,
+ vector<string> &TransList,
string &InfoDir, pkgCdromStatus *log,
unsigned int Depth)
{
static ino_t Inodes[9];
+ DIR *D;
// if we have a look we "pulse" now
if(log)
@@ -90,8 +94,28 @@ bool pkgCdrom::FindPackages(string CD,vector<string> &List,
if (_config->FindB("APT::CDROM::Thorough",false) == false)
return true;
}
+
+  // see if we find translation indexes
+ if (stat("i18n",&Buf) == 0)
+ {
+ D = opendir("i18n");
+ for (struct dirent *Dir = readdir(D); Dir != 0; Dir = readdir(D))
+ {
+ if(strstr(Dir->d_name,"Translation") != NULL)
+ {
+ if (_config->FindB("Debug::aptcdrom",false) == true)
+ std::clog << "found translations: " << Dir->d_name << "\n";
+ string file = Dir->d_name;
+ if(file.substr(file.size()-3,file.size()) == ".gz")
+ file = file.substr(0,file.size()-3);
+ TransList.push_back(CD+"i18n/"+ file);
+ }
+ }
+ closedir(D);
+ }
+
- DIR *D = opendir(".");
+ D = opendir(".");
if (D == 0)
return _error->Errno("opendir","Unable to read %s",CD.c_str());
@@ -127,7 +151,7 @@ bool pkgCdrom::FindPackages(string CD,vector<string> &List,
Inodes[Depth] = Buf.st_ino;
// Descend
- if (FindPackages(CD + Dir->d_name,List,SList,SigList,InfoDir,log,Depth+1) == false)
+ if (FindPackages(CD + Dir->d_name,List,SList,SigList,TransList,InfoDir,log,Depth+1) == false)
break;
if (chdir(CD.c_str()) != 0)
@@ -612,9 +636,10 @@ bool pkgCdrom::Add(pkgCdromStatus *log)
vector<string> List;
vector<string> SourceList;
vector<string> SigList;
+ vector<string> TransList;
string StartDir = SafeGetCWD();
string InfoDir;
- if (FindPackages(CDROM,List,SourceList, SigList,InfoDir,log) == false)
+ if (FindPackages(CDROM,List,SourceList, SigList,TransList,InfoDir,log) == false)
{
log->Update("\n");
return false;
@@ -642,11 +667,13 @@ bool pkgCdrom::Add(pkgCdromStatus *log)
DropRepeats(List,"Packages");
DropRepeats(SourceList,"Sources");
DropRepeats(SigList,"Release.gpg");
+ DropRepeats(TransList,"");
if(log) {
msg.str("");
- ioprintf(msg, _("Found %i package indexes, %i source indexes and "
- "%i signatures\n"),
- List.size(), SourceList.size(), SigList.size());
+ ioprintf(msg, _("Found %i package indexes, %i source indexes, "
+ "%i translation indexes and %i signatures\n"),
+ List.size(), SourceList.size(), TransList.size(),
+ SigList.size());
log->Update(msg.str(), STEP_SCAN);
}
@@ -736,8 +763,10 @@ bool pkgCdrom::Add(pkgCdromStatus *log)
// Copy the package files to the state directory
PackageCopy Copy;
SourceCopy SrcCopy;
+ TranslationsCopy TransCopy;
if (Copy.CopyPackages(CDROM,Name,List, log) == false ||
- SrcCopy.CopyPackages(CDROM,Name,SourceList, log) == false)
+ SrcCopy.CopyPackages(CDROM,Name,SourceList, log) == false ||
+ TransCopy.CopyTranslations(CDROM,Name,TransList, log) == false)
return false;
// reduce the List so that it takes less space in sources.list
diff --git a/apt-pkg/cdrom.h b/apt-pkg/cdrom.h
index 085eb64e2..e18aaff3e 100644
--- a/apt-pkg/cdrom.h
+++ b/apt-pkg/cdrom.h
@@ -50,8 +50,11 @@ class pkgCdrom
};
- bool FindPackages(string CD,vector<string> &List,
- vector<string> &SList, vector<string> &SigList,
+ bool FindPackages(string CD,
+ vector<string> &List,
+ vector<string> &SList,
+ vector<string> &SigList,
+ vector<string> &TransList,
string &InfoDir, pkgCdromStatus *log,
unsigned int Depth = 0);
bool DropBinaryArch(vector<string> &List);
diff --git a/apt-pkg/contrib/strutl.cc b/apt-pkg/contrib/strutl.cc
index d96155917..37d263794 100644
--- a/apt-pkg/contrib/strutl.cc
+++ b/apt-pkg/contrib/strutl.cc
@@ -32,12 +32,55 @@
#include <regex.h>
#include <errno.h>
#include <stdarg.h>
+#include <iconv.h>
#include "config.h"
using namespace std;
/*}}}*/
+// UTF8ToCodeset - Convert some UTF-8 string for some codeset /*{{{*/
+// ---------------------------------------------------------------------
+/* This is handy to use before display some information for enduser */
+bool UTF8ToCodeset(const char *codeset, const string &orig, string *dest)
+{
+ iconv_t cd;
+ const char *inbuf;
+ char *inptr, *outbuf, *outptr;
+ size_t insize, outsize;
+
+ cd = iconv_open(codeset, "UTF-8");
+ if (cd == (iconv_t)(-1)) {
+ // Something went wrong
+ if (errno == EINVAL)
+ _error->Error("conversion from 'UTF-8' to '%s' not available",
+ codeset);
+ else
+ perror("iconv_open");
+
+ // Clean the destination string
+ *dest = "";
+
+ return false;
+ }
+
+ insize = outsize = orig.size();
+ inbuf = orig.data();
+ inptr = (char *)inbuf;
+ outbuf = new char[insize+1];
+ outptr = outbuf;
+
+ iconv(cd, &inptr, &insize, &outptr, &outsize);
+ *outptr = '\0';
+
+ *dest = outbuf;
+ delete[] outbuf;
+
+ iconv_close(cd);
+
+ return true;
+}
+ /*}}}*/
// strstrip - Remove white space from the front and back of a string /*{{{*/
// ---------------------------------------------------------------------
/* This is handy to use when parsing a file. It also removes \n's left
diff --git a/apt-pkg/contrib/strutl.h b/apt-pkg/contrib/strutl.h
index 6ec2b7811..254087267 100644
--- a/apt-pkg/contrib/strutl.h
+++ b/apt-pkg/contrib/strutl.h
@@ -38,7 +38,8 @@ using std::ostream;
#define APT_FORMAT2
#define APT_FORMAT3
#endif
-
+
+bool UTF8ToCodeset(const char *codeset, const string &orig, string *dest);
char *_strstrip(char *String);
char *_strtabexpand(char *String,size_t Len);
bool ParseQuoteWord(const char *&String,string &Res);
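The new helper converts UTF-8 text from the index files into whatever codeset the user's locale uses; debRecordParser::LongDesc() further down in this diff is its first caller. A sketch of the intended call pattern, not apt code:

   #include <apt-pkg/strutl.h>
   #include <langinfo.h>
   #include <string.h>
   #include <string>

   // Convert a UTF-8 description into the locale codeset for display.
   static std::string ForDisplay(const std::string &utf8)
   {
      const char *codeset = nl_langinfo(CODESET);   // e.g. "ISO-8859-1"
      if (strcmp(codeset, "UTF-8") == 0)
         return utf8;                               // nothing to convert
      std::string dest;
      UTF8ToCodeset(codeset, utf8, &dest);          // dest becomes "" on error
      return dest;
   }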
diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc
index ff8bce85d..38ecdd16a 100644
--- a/apt-pkg/deb/debindexfile.cc
+++ b/apt-pkg/deb/debindexfile.cc
@@ -320,6 +320,170 @@ pkgCache::PkgFileIterator debPackagesIndex::FindInCache(pkgCache &Cache) const
}
/*}}}*/
+// TranslationsIndex::debTranslationsIndex - Constructor /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+debTranslationsIndex::debTranslationsIndex(string URI,string Dist,string Section) :
+ pkgIndexFile(true), URI(URI), Dist(Dist), Section(Section)
+{
+}
+ /*}}}*/
+// TranslationIndex::Trans* - Return the URI to the translation files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+inline string debTranslationsIndex::IndexFile(const char *Type) const
+{
+ return _config->FindDir("Dir::State::lists") + URItoFileName(IndexURI(Type));
+}
+string debTranslationsIndex::IndexURI(const char *Type) const
+{
+ string Res;
+ if (Dist[Dist.size() - 1] == '/')
+ {
+ if (Dist != "/")
+ Res = URI + Dist;
+ else
+ Res = URI;
+ }
+ else
+ Res = URI + "dists/" + Dist + '/' + Section +
+ "/i18n/Translation-";
+
+ Res += Type;
+ return Res;
+}
+ /*}}}*/
+// TranslationsIndex::GetIndexes - Fetch the index files /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debTranslationsIndex::GetIndexes(pkgAcquire *Owner) const
+{
+ if (TranslationsAvailable()) {
+ string TranslationFile = "Translation-" + LanguageCode();
+ new pkgAcqIndexTrans(Owner, IndexURI(LanguageCode().c_str()),
+ Info(TranslationFile.c_str()),
+ TranslationFile);
+ }
+
+ return true;
+}
+ /*}}}*/
+// TranslationsIndex::Describe - Give a descriptive path to the index /*{{{*/
+// ---------------------------------------------------------------------
+/* This should help the user find the index in the sources.list and
+ in the filesystem for problem solving */
+string debTranslationsIndex::Describe(bool Short) const
+{
+ char S[300];
+ if (Short == true)
+ snprintf(S,sizeof(S),"%s",Info(TranslationFile().c_str()).c_str());
+ else
+ snprintf(S,sizeof(S),"%s (%s)",Info(TranslationFile().c_str()).c_str(),
+ IndexFile(LanguageCode().c_str()).c_str());
+ return S;
+}
+ /*}}}*/
+// TranslationsIndex::Info - One liner describing the index URI /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+string debTranslationsIndex::Info(const char *Type) const
+{
+ string Info = ::URI::SiteOnly(URI) + ' ';
+ if (Dist[Dist.size() - 1] == '/')
+ {
+ if (Dist != "/")
+ Info += Dist;
+ }
+ else
+ Info += Dist + '/' + Section;
+ Info += " ";
+ Info += Type;
+ return Info;
+}
+ /*}}}*/
+bool debTranslationsIndex::HasPackages() const
+{
+ if(!TranslationsAvailable())
+ return false;
+
+ return FileExists(IndexFile(LanguageCode().c_str()));
+}
+
+// TranslationsIndex::Exists - Check if the index is available /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debTranslationsIndex::Exists() const
+{
+ return FileExists(IndexFile(LanguageCode().c_str()));
+}
+ /*}}}*/
+// TranslationsIndex::Size - Return the size of the index /*{{{*/
+// ---------------------------------------------------------------------
+/* This is really only used for progress reporting. */
+unsigned long debTranslationsIndex::Size() const
+{
+ struct stat S;
+ if (stat(IndexFile(LanguageCode().c_str()).c_str(),&S) != 0)
+ return 0;
+ return S.st_size;
+}
+ /*}}}*/
+// TranslationsIndex::Merge - Load the index file into a cache /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool debTranslationsIndex::Merge(pkgCacheGenerator &Gen,OpProgress &Prog) const
+{
+ // Check the translation file, if in use
+ string TranslationFile = IndexFile(LanguageCode().c_str());
+ if (TranslationsAvailable() && FileExists(TranslationFile))
+ {
+ FileFd Trans(TranslationFile,FileFd::ReadOnly);
+ debListParser TransParser(&Trans);
+ if (_error->PendingError() == true)
+ return false;
+
+ Prog.SubProgress(0, Info(TranslationFile.c_str()));
+ if (Gen.SelectFile(TranslationFile,string(),*this) == false)
+ return _error->Error("Problem with SelectFile %s",TranslationFile.c_str());
+
+ // Store the IMS information
+ pkgCache::PkgFileIterator TransFile = Gen.GetCurFile();
+ struct stat TransSt;
+ if (fstat(Trans.Fd(),&TransSt) != 0)
+ return _error->Errno("fstat","Failed to stat");
+ TransFile->Size = TransSt.st_size;
+ TransFile->mtime = TransSt.st_mtime;
+
+ if (Gen.MergeList(TransParser) == false)
+ return _error->Error("Problem with MergeList %s",TranslationFile.c_str());
+ }
+
+ return true;
+}
+ /*}}}*/
+// TranslationsIndex::FindInCache - Find this index /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgCache::PkgFileIterator debTranslationsIndex::FindInCache(pkgCache &Cache) const
+{
+ string FileName = IndexFile(LanguageCode().c_str());
+
+ pkgCache::PkgFileIterator File = Cache.FileBegin();
+ for (; File.end() == false; File++)
+ {
+ if (FileName != File.FileName())
+ continue;
+
+ struct stat St;
+ if (stat(File.FileName(),&St) != 0)
+ return pkgCache::PkgFileIterator(Cache);
+ if ((unsigned)St.st_size != File->Size || St.st_mtime != File->mtime)
+ return pkgCache::PkgFileIterator(Cache);
+ return File;
+ }
+ return File;
+}
+ /*}}}*/
// StatusIndex::debStatusIndex - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -416,6 +580,11 @@ class debIFTypePkg : public pkgIndexFile::Type
};
debIFTypePkg() {Label = "Debian Package Index";};
};
+class debIFTypeTrans : public debIFTypePkg
+{
+ public:
+ debIFTypeTrans() {Label = "Debian Translation Index";};
+};
class debIFTypeStatus : public pkgIndexFile::Type
{
public:
@@ -428,6 +597,7 @@ class debIFTypeStatus : public pkgIndexFile::Type
};
static debIFTypeSrc _apt_Src;
static debIFTypePkg _apt_Pkg;
+static debIFTypeTrans _apt_Trans;
static debIFTypeStatus _apt_Status;
const pkgIndexFile::Type *debSourcesIndex::GetType() const
@@ -438,6 +608,10 @@ const pkgIndexFile::Type *debPackagesIndex::GetType() const
{
return &_apt_Pkg;
}
+const pkgIndexFile::Type *debTranslationsIndex::GetType() const
+{
+ return &_apt_Trans;
+}
const pkgIndexFile::Type *debStatusIndex::GetType() const
{
return &_apt_Status;
diff --git a/apt-pkg/deb/debindexfile.h b/apt-pkg/deb/debindexfile.h
index a1b9583a4..57005222f 100644
--- a/apt-pkg/deb/debindexfile.h
+++ b/apt-pkg/deb/debindexfile.h
@@ -74,6 +74,36 @@ class debPackagesIndex : public pkgIndexFile
debPackagesIndex(string URI,string Dist,string Section,bool Trusted);
};
+class debTranslationsIndex : public pkgIndexFile
+{
+ string URI;
+ string Dist;
+ string Section;
+
+ string Info(const char *Type) const;
+ string IndexFile(const char *Type) const;
+ string IndexURI(const char *Type) const;
+
+ inline string TranslationFile() const {return "Translation-" + LanguageCode();};
+
+ public:
+
+ virtual const Type *GetType() const;
+
+ // Interface for acquire
+ virtual string Describe(bool Short) const;
+ virtual bool GetIndexes(pkgAcquire *Owner) const;
+
+ // Interface for the Cache Generator
+ virtual bool Exists() const;
+ virtual bool HasPackages() const;
+ virtual unsigned long Size() const;
+ virtual bool Merge(pkgCacheGenerator &Gen,OpProgress &Prog) const;
+ virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const;
+
+ debTranslationsIndex(string URI,string Dist,string Section);
+};
+
class debSourcesIndex : public pkgIndexFile
{
string URI;
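Because debTranslationsIndex implements the generic pkgIndexFile interface declared above, a Translation-<lang> file can be handled like any other index. A small sketch with illustrative mirror, distribution and section values; not code from this changeset:

   #include <apt-pkg/debindexfile.h>
   #include <iostream>

   static void ShowTranslationIndex()
   {
      debTranslationsIndex Trans("http://ftp.debian.org/debian/", "sid", "main");
      if (Trans.Exists() == true)
         std::cout << Trans.Describe(false) << " ("
                   << Trans.Size() << " bytes)" << std::endl;
   }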
diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc
index d0dc7a260..c2b26b5eb 100644
--- a/apt-pkg/deb/deblistparser.cc
+++ b/apt-pkg/deb/deblistparser.cc
@@ -15,6 +15,7 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/crc-16.h>
+#include <apt-pkg/md5.h>
#include <ctype.h>
@@ -117,6 +118,48 @@ bool debListParser::NewVersion(pkgCache::VerIterator Ver)
return true;
}
/*}}}*/
+// ListParser::Description - Return the description string /*{{{*/
+// ---------------------------------------------------------------------
+/* This is to return the string describing the package in debian
+ form. If this returns the blank string then the entry is assumed to
+ only describe package properties */
+string debListParser::Description()
+{
+ if (DescriptionLanguage().empty())
+ return Section.FindS("Description");
+ else
+ return Section.FindS(("Description-" + pkgIndexFile::LanguageCode()).c_str());
+}
+ /*}}}*/
+// ListParser::DescriptionLanguage - Return the description lang string /*{{{*/
+// ---------------------------------------------------------------------
+/* This returns the string describing the language of the description.
+   If it returns the blank string then the entry is assumed to hold the
+   original, untranslated description. */
+string debListParser::DescriptionLanguage()
+{
+ return Section.FindS("Description").empty() ? pkgIndexFile::LanguageCode() : "";
+}
+ /*}}}*/
+// ListParser::Description - Return the description_md5 MD5SumValue /*{{{*/
+// ---------------------------------------------------------------------
+/* This returns the MD5 checksum used to verify that this is the right
+   description for the package. If no Description-md5 field is found in
+   the section, the checksum is calculated from the description itself.
+ */
+MD5SumValue debListParser::Description_md5()
+{
+ string value = Section.FindS("Description-md5");
+
+ if (value.empty())
+ {
+ MD5Summation md5;
+ md5.Add((Description() + "\n").c_str());
+ return md5.Result();
+ } else
+ return MD5SumValue(value);
+}
+ /*}}}*/
// ListParser::UsePackage - Update a package structure /*{{{*/
// ---------------------------------------------------------------------
/* This is called to update the package with any new information
diff --git a/apt-pkg/deb/deblistparser.h b/apt-pkg/deb/deblistparser.h
index 3a0e0421b..34bb29c72 100644
--- a/apt-pkg/deb/deblistparser.h
+++ b/apt-pkg/deb/deblistparser.h
@@ -12,6 +12,7 @@
#define PKGLIB_DEBLISTPARSER_H
#include <apt-pkg/pkgcachegen.h>
+#include <apt-pkg/indexfile.h>
#include <apt-pkg/tagfile.h>
class debListParser : public pkgCacheGenerator::ListParser
@@ -47,6 +48,9 @@ class debListParser : public pkgCacheGenerator::ListParser
virtual string Package();
virtual string Version();
virtual bool NewVersion(pkgCache::VerIterator Ver);
+ virtual string Description();
+ virtual string DescriptionLanguage();
+ virtual MD5SumValue Description_md5();
virtual unsigned short VersionHash();
virtual bool UsePackage(pkgCache::PkgIterator Pkg,
pkgCache::VerIterator Ver);
diff --git a/apt-pkg/deb/debmetaindex.cc b/apt-pkg/deb/debmetaindex.cc
index 85e5b16b3..8cf31b326 100644
--- a/apt-pkg/deb/debmetaindex.cc
+++ b/apt-pkg/deb/debmetaindex.cc
@@ -157,6 +157,16 @@ bool debReleaseIndex::GetIndexes(pkgAcquire *Owner, bool GetAll) const
ComputeIndexTargets(),
new indexRecords (Dist));
+ // Queue the translations
+ for (vector<const debSectionEntry *>::const_iterator I = SectionEntries.begin();
+ I != SectionEntries.end(); I++) {
+
+ if((*I)->IsSrc)
+ continue;
+ debTranslationsIndex i = debTranslationsIndex(URI,Dist,(*I)->Section);
+ i.GetIndexes(Owner);
+ }
+
return true;
}
@@ -181,11 +191,16 @@ vector <pkgIndexFile *> *debReleaseIndex::GetIndexFiles()
Indexes = new vector <pkgIndexFile*>;
for (vector<const debSectionEntry *>::const_iterator I = SectionEntries.begin();
- I != SectionEntries.end(); I++)
+ I != SectionEntries.end(); I++) {
if ((*I)->IsSrc)
Indexes->push_back(new debSourcesIndex (URI, Dist, (*I)->Section, IsTrusted()));
else
+ {
Indexes->push_back(new debPackagesIndex (URI, Dist, (*I)->Section, IsTrusted()));
+ Indexes->push_back(new debTranslationsIndex(URI, Dist, (*I)->Section));
+ }
+ }
+
return Indexes;
}
diff --git a/apt-pkg/deb/debrecords.cc b/apt-pkg/deb/debrecords.cc
index 6652a6ad9..518988bb6 100644
--- a/apt-pkg/deb/debrecords.cc
+++ b/apt-pkg/deb/debrecords.cc
@@ -12,7 +12,9 @@
#pragma implementation "apt-pkg/debrecords.h"
#endif
#include <apt-pkg/debrecords.h>
+#include <apt-pkg/strutl.h>
#include <apt-pkg/error.h>
+#include <langinfo.h>
/*}}}*/
// RecordParser::debRecordParser - Constructor /*{{{*/
@@ -31,6 +33,10 @@ bool debRecordParser::Jump(pkgCache::VerFileIterator const &Ver)
{
return Tags.Jump(Section,Ver->Offset);
}
+bool debRecordParser::Jump(pkgCache::DescFileIterator const &Desc)
+{
+ return Tags.Jump(Section,Desc->Offset);
+}
/*}}}*/
// RecordParser::FileName - Return the archive filename on the site /*{{{*/
// ---------------------------------------------------------------------
@@ -77,7 +83,7 @@ string debRecordParser::Maintainer()
/* */
string debRecordParser::ShortDesc()
{
- string Res = Section.FindS("Description");
+ string Res = LongDesc();
string::size_type Pos = Res.find('\n');
if (Pos == string::npos)
return Res;
@@ -89,7 +95,20 @@ string debRecordParser::ShortDesc()
/* */
string debRecordParser::LongDesc()
{
- return Section.FindS("Description");
+ string orig, dest;
+ char *codeset = nl_langinfo(CODESET);
+
+ if (!Section.FindS("Description").empty())
+ orig = Section.FindS("Description").c_str();
+ else
+ orig = Section.FindS(("Description-" + pkgIndexFile::LanguageCode()).c_str()).c_str();
+
+ if (strcmp(codeset,"UTF-8") != 0) {
+ UTF8ToCodeset(codeset, orig, &dest);
+ orig = dest;
+ }
+
+ return orig;
}
/*}}}*/
// RecordParser::SourcePkg - Return the source package name if any /*{{{*/
diff --git a/apt-pkg/deb/debrecords.h b/apt-pkg/deb/debrecords.h
index efef2e588..24e5aab88 100644
--- a/apt-pkg/deb/debrecords.h
+++ b/apt-pkg/deb/debrecords.h
@@ -19,6 +19,7 @@
#endif
#include <apt-pkg/pkgrecords.h>
+#include <apt-pkg/indexfile.h>
#include <apt-pkg/tagfile.h>
class debRecordParser : public pkgRecords::Parser
@@ -30,6 +31,7 @@ class debRecordParser : public pkgRecords::Parser
protected:
virtual bool Jump(pkgCache::VerFileIterator const &Ver);
+ virtual bool Jump(pkgCache::DescFileIterator const &Desc);
public:
diff --git a/apt-pkg/deb/dpkgpm.h b/apt-pkg/deb/dpkgpm.h
index 2ff8a9ac7..0b181dc43 100644
--- a/apt-pkg/deb/dpkgpm.h
+++ b/apt-pkg/deb/dpkgpm.h
@@ -47,7 +47,7 @@ class pkgDPkgPM : public pkgPackageManager
bool RunScripts(const char *Cnf);
bool RunScriptsWithPkgs(const char *Cnf);
bool SendV2Pkgs(FILE *F);
-
+
   // The actual installation implementation
virtual bool Install(PkgIterator Pkg,string File);
virtual bool Configure(PkgIterator Pkg);
diff --git a/apt-pkg/depcache.cc b/apt-pkg/depcache.cc
index 58d1d25e5..446bbe402 100644
--- a/apt-pkg/depcache.cc
+++ b/apt-pkg/depcache.cc
@@ -16,16 +16,52 @@
#include <apt-pkg/error.h>
#include <apt-pkg/sptr.h>
#include <apt-pkg/algorithms.h>
+
+#include <apt-pkg/fileutl.h>
#include <apt-pkg/configuration.h>
+#include <apt-pkg/pkgsystem.h>
+#include <apt-pkg/tagfile.h>
+
+#include <iostream>
+#include <sstream>
+#include <set>
#include <apti18n.h>
- /*}}}*/
+
+pkgDepCache::ActionGroup::ActionGroup(pkgDepCache &cache) :
+ cache(cache), released(false)
+{
+ ++cache.group_level;
+}
+
+void pkgDepCache::ActionGroup::release()
+{
+ if(!released)
+ {
+ if(cache.group_level == 0)
+ std::cerr << "W: Unbalanced action groups, expect badness" << std::endl;
+ else
+ {
+ --cache.group_level;
+
+ if(cache.group_level == 0)
+ cache.MarkAndSweep();
+ }
+
+      released = true;
+ }
+}
+
+pkgDepCache::ActionGroup::~ActionGroup()
+{
+ release();
+}
// DepCache::pkgDepCache - Constructors /*{{{*/
// ---------------------------------------------------------------------
/* */
pkgDepCache::pkgDepCache(pkgCache *pCache,Policy *Plcy) :
- Cache(pCache), PkgState(0), DepState(0)
+ group_level(0), Cache(pCache), PkgState(0), DepState(0)
{
delLocalPolicy = 0;
LocalPolicy = Plcy;
@@ -48,6 +84,10 @@ pkgDepCache::~pkgDepCache()
/* This allocats the extension buffers and initializes them. */
bool pkgDepCache::Init(OpProgress *Prog)
{
+ // Suppress mark updates during this operation (just in case) and
+ // run a mark operation when Init terminates.
+ ActionGroup actions(*this);
+
delete [] PkgState;
delete [] DepState;
PkgState = new StateCache[Head().PackageCount];
@@ -73,7 +113,7 @@ bool pkgDepCache::Init(OpProgress *Prog)
// Find the proper cache slot
StateCache &State = PkgState[I->ID];
State.iFlags = 0;
-
+
// Figure out the install version
State.CandidateVer = GetCandidateVer(I);
State.InstallVer = I.CurrentVer();
@@ -95,11 +135,130 @@ bool pkgDepCache::Init(OpProgress *Prog)
if(Prog != 0)
Prog->Done();
-
+
return true;
}
/*}}}*/
+bool pkgDepCache::readStateFile(OpProgress *Prog)
+{
+ FileFd state_file;
+ string state = _config->FindDir("Dir::State") + "extended_states";
+ if(FileExists(state)) {
+ state_file.Open(state, FileFd::ReadOnly);
+ int file_size = state_file.Size();
+ if(Prog != NULL)
+ Prog->OverallProgress(0, file_size, 1,
+ _("Reading state information"));
+
+ pkgTagFile tagfile(&state_file);
+ pkgTagSection section;
+ int amt=0;
+ while(tagfile.Step(section)) {
+ string pkgname = section.FindS("Package");
+ pkgCache::PkgIterator pkg=Cache->FindPkg(pkgname);
+ // Silently ignore unknown packages and packages with no actual
+ // version.
+ if(!pkg.end() && !pkg.VersionList().end()) {
+ short reason = section.FindI("Auto-Installed", 0);
+ if(reason > 0)
+ PkgState[pkg->ID].Flags |= Flag::Auto;
+ if(_config->FindB("Debug::pkgAutoRemove",false))
+ std::cout << "Auto-Installed : " << pkgname << std::endl;
+ amt+=section.size();
+ if(Prog != NULL)
+ Prog->OverallProgress(amt, file_size, 1,
+ _("Reading state information"));
+ }
+ if(Prog != NULL)
+ Prog->OverallProgress(file_size, file_size, 1,
+ _("Reading state information"));
+ }
+ }
+
+ return true;
+}
+
+bool pkgDepCache::writeStateFile(OpProgress *prog)
+{
+ if(_config->FindB("Debug::pkgAutoRemove",false))
+ std::clog << "pkgDepCache::writeStateFile()" << std::endl;
+
+ FileFd StateFile;
+ string state = _config->FindDir("Dir::State") + "extended_states";
+
+   // if it does not exist, create an empty one
+ if(!FileExists(state))
+ {
+ StateFile.Open(state, FileFd::WriteEmpty);
+ StateFile.Close();
+ }
+
+ // open it
+ if(!StateFile.Open(state, FileFd::ReadOnly))
+ return _error->Error(_("Failed to open StateFile %s"),
+ state.c_str());
+
+ FILE *OutFile;
+ string outfile = state + ".tmp";
+ if((OutFile = fopen(outfile.c_str(),"w")) == NULL)
+ return _error->Error(_("Failed to write temporary StateFile %s"),
+ outfile.c_str());
+
+ // first merge with the existing sections
+ pkgTagFile tagfile(&StateFile);
+ pkgTagSection section;
+ std::set<string> pkgs_seen;
+ const char *nullreorderlist[] = {0};
+ while(tagfile.Step(section)) {
+ string pkgname = section.FindS("Package");
+ // Silently ignore unknown packages and packages with no actual
+ // version.
+ pkgCache::PkgIterator pkg=Cache->FindPkg(pkgname);
+ if(pkg.end() || pkg.VersionList().end())
+ continue;
+ bool oldAuto = section.FindI("Auto-Installed");
+ bool newAuto = (PkgState[pkg->ID].Flags & Flag::Auto);
+ if(_config->FindB("Debug::pkgAutoRemove",false))
+         std::clog << "Update existing AutoInstall info: "
+ << pkg.Name() << std::endl;
+ TFRewriteData rewrite[2];
+ rewrite[0].Tag = "Auto-Installed";
+ rewrite[0].Rewrite = newAuto ? "1" : "0";
+ rewrite[0].NewTag = 0;
+ rewrite[1].Tag = 0;
+ TFRewrite(OutFile, section, nullreorderlist, rewrite);
+ fprintf(OutFile,"\n");
+ pkgs_seen.insert(pkgname);
+ }
+
+ // then write the ones we have not seen yet
+ std::ostringstream ostr;
+ for(pkgCache::PkgIterator pkg=Cache->PkgBegin(); !pkg.end(); pkg++) {
+ if(PkgState[pkg->ID].Flags & Flag::Auto) {
+ if (pkgs_seen.find(pkg.Name()) != pkgs_seen.end()) {
+ if(_config->FindB("Debug::pkgAutoRemove",false))
+ std::clog << "Skipping already written " << pkg.Name() << std::endl;
+ continue;
+ }
+ if(_config->FindB("Debug::pkgAutoRemove",false))
+ std::clog << "Writing new AutoInstall: "
+ << pkg.Name() << std::endl;
+ ostr.str(string(""));
+ ostr << "Package: " << pkg.Name()
+ << "\nAuto-Installed: 1\n\n";
+ fprintf(OutFile,ostr.str().c_str());
+ fprintf(OutFile,"\n");
+ }
+ }
+ fclose(OutFile);
+
+ // move the outfile over the real file
+ rename(outfile.c_str(), state.c_str());
+
+ return true;
+}
+
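readStateFile() and writeStateFile() together maintain the Dir::State/extended_states file, a small tag-file database of the Auto-Installed flag. Judging from the writer above, an entry for a single automatically installed package looks roughly like this (the package name is an illustrative placeholder):

   Package: libexample1
   Auto-Installed: 1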
// DepCache::CheckDep - Checks a single dependency /*{{{*/
// ---------------------------------------------------------------------
/* This first checks the dependency against the main target package and
@@ -240,9 +399,11 @@ void pkgDepCache::AddStates(const PkgIterator &Pkg,int Add)
{
StateCache &State = PkgState[Pkg->ID];
- // The Package is broken
+ // The Package is broken (either minimal dep or policy dep)
if ((State.DepState & DepInstMin) != DepInstMin)
iBrokenCount += Add;
+ if ((State.DepState & DepInstPolicy) != DepInstPolicy)
+ iPolicyBrokenCount += Add;
// Bad state
if (Pkg.State() != PkgIterator::NeedsNothing)
@@ -453,12 +614,14 @@ void pkgDepCache::Update(OpProgress *Prog)
if (Prog != 0)
Prog->Progress(Done);
+
+ readStateFile(Prog);
}
/*}}}*/
// DepCache::Update - Update the deps list of a package /*{{{*/
// ---------------------------------------------------------------------
/* This is a helper for update that only does the dep portion of the scan.
- It is mainly ment to scan reverse dependencies. */
+ It is mainly meant to scan reverse dependencies. */
void pkgDepCache::Update(DepIterator D)
{
// Update the reverse deps
@@ -510,7 +673,7 @@ void pkgDepCache::Update(PkgIterator const &Pkg)
// DepCache::MarkKeep - Put the package in the keep state /*{{{*/
// ---------------------------------------------------------------------
/* */
-void pkgDepCache::MarkKeep(PkgIterator const &Pkg,bool Soft)
+void pkgDepCache::MarkKeep(PkgIterator const &Pkg, bool Soft, bool FromUser)
{
// Simplifies other routines.
if (Pkg.end() == true)
@@ -522,6 +685,9 @@ void pkgDepCache::MarkKeep(PkgIterator const &Pkg,bool Soft)
Pkg.CurrentVer().Downloadable() == false)
return;
+ /** \todo Can this be moved later in the method? */
+ ActionGroup group(*this);
+
/* We changed the soft state all the time so the UI is a bit nicer
to use */
StateCache &P = PkgState[Pkg->ID];
@@ -538,7 +704,8 @@ void pkgDepCache::MarkKeep(PkgIterator const &Pkg,bool Soft)
if (Pkg->VersionList == 0)
return;
- P.Flags &= ~Flag::Auto;
+ if(FromUser && !P.Marked)
+ P.Flags &= ~Flag::Auto;
RemoveSizes(Pkg);
RemoveStates(Pkg);
@@ -564,6 +731,8 @@ void pkgDepCache::MarkDelete(PkgIterator const &Pkg, bool rPurge)
if (Pkg.end() == true)
return;
+ ActionGroup group(*this);
+
// Check that it is not already marked for delete
StateCache &P = PkgState[Pkg->ID];
P.iFlags &= ~(AutoKept | Purge);
@@ -586,7 +755,6 @@ void pkgDepCache::MarkDelete(PkgIterator const &Pkg, bool rPurge)
else
P.Mode = ModeDelete;
P.InstallVer = 0;
- P.Flags &= Flag::Auto;
AddStates(Pkg);
Update(Pkg);
@@ -597,7 +765,8 @@ void pkgDepCache::MarkDelete(PkgIterator const &Pkg, bool rPurge)
// ---------------------------------------------------------------------
/* */
void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
- unsigned long Depth)
+ unsigned long Depth, bool FromUser,
+ bool ForceImportantDeps)
{
if (Depth > 100)
return;
@@ -606,26 +775,27 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
if (Pkg.end() == true)
return;
+ ActionGroup group(*this);
+
/* Check that it is not already marked for install and that it can be
installed */
StateCache &P = PkgState[Pkg->ID];
P.iFlags &= ~AutoKept;
- if (P.InstBroken() == false && (P.Mode == ModeInstall ||
+ if ((P.InstPolicyBroken() == false && P.InstBroken() == false) &&
+ (P.Mode == ModeInstall ||
P.CandidateVer == (Version *)Pkg.CurrentVer()))
{
if (P.CandidateVer == (Version *)Pkg.CurrentVer() && P.InstallVer == 0)
- MarkKeep(Pkg);
+ MarkKeep(Pkg, false, FromUser);
return;
}
   // See if there is even any possible installation candidate
if (P.CandidateVer == 0)
return;
-
   // We don't even try to install virtual packages..
if (Pkg->VersionList == 0)
return;
-
/* Target the candidate version and remove the autoflag. We reset the
autoflag below if this was called recursively. Otherwise the user
should have the ability to de-auto a package by changing its state */
@@ -634,7 +804,20 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
P.Mode = ModeInstall;
P.InstallVer = P.CandidateVer;
- P.Flags &= ~Flag::Auto;
+
+ if(FromUser)
+ {
+ // Set it to manual if it's a new install or cancelling the
+ // removal of a garbage package.
+ if(P.Status == 2 || (!Pkg.CurrentVer().end() && !P.Marked))
+ P.Flags &= ~Flag::Auto;
+ }
+ else
+ {
+ // Set it to auto if this is a new install.
+ if(P.Status == 2)
+ P.Flags |= Flag::Auto;
+ }
if (P.CandidateVer == (Version *)Pkg.CurrentVer())
P.Mode = ModeKeep;
@@ -666,10 +849,43 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
/* Check if this dep should be consider for install. If it is a user
defined important dep and we are installed a new package then
- it will be installed. Otherwise we only worry about critical deps */
+ it will be installed. Otherwise we only check for important
+ deps that have changed from the installed version
+ */
if (IsImportantDep(Start) == false)
continue;
- if (Pkg->CurrentVer != 0 && Start.IsCritical() == false)
+
+      /* check if any ImportantDep() (but not Critical) were added
+ * since we installed the package
+ */
+ bool isNewImportantDep = false;
+ if(!ForceImportantDeps && !Start.IsCritical())
+ {
+ bool found=false;
+ VerIterator instVer = Pkg.CurrentVer();
+ if(!instVer.end())
+ {
+ for (DepIterator D = instVer.DependsList(); D.end() != true; D++)
+ {
+ //FIXME: deal better with or-groups(?)
+ DepIterator LocalStart = D;
+
+ if(IsImportantDep(D) && Start.TargetPkg() == D.TargetPkg())
+ found=true;
+ }
+ // this is a new dep if it was not found to be already
+            // an important dep of the installed package
+ isNewImportantDep = !found;
+ }
+ }
+ if(isNewImportantDep)
+ if(_config->FindB("Debug::pkgDepCache::AutoInstall",false) == true)
+ std::clog << "new important dependency: "
+ << Start.TargetPkg().Name() << std::endl;
+
+ // skip important deps if the package is already installed
+ if (Pkg->CurrentVer != 0 && Start.IsCritical() == false
+ && !isNewImportantDep && !ForceImportantDeps)
continue;
/* If we are in an or group locate the first or that can
@@ -711,19 +927,18 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
}
}
- if (InstPkg.end() == false)
+ if (InstPkg.end() == false)
{
if(_config->FindB("Debug::pkgDepCache::AutoInstall",false) == true)
std::clog << "Installing " << InstPkg.Name()
<< " as dep of " << Pkg.Name()
<< std::endl;
- MarkInstall(InstPkg,true,Depth + 1);
-
- // Set the autoflag, after MarkInstall because MarkInstall unsets it
+ MarkInstall(InstPkg, true, Depth + 1, false, ForceImportantDeps);
+ // Set the autoflag, after MarkInstall because MarkInstall
+ // unsets it
if (P->CurrentVer == 0)
PkgState[InstPkg->ID].Flags |= Flag::Auto;
}
-
continue;
}
@@ -737,7 +952,6 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
PkgIterator Pkg = Ver.ParentPkg();
MarkDelete(Pkg);
- PkgState[Pkg->ID].Flags |= Flag::Auto;
}
continue;
}
@@ -749,6 +963,8 @@ void pkgDepCache::MarkInstall(PkgIterator const &Pkg,bool AutoInst,
/* */
void pkgDepCache::SetReInstall(PkgIterator const &Pkg,bool To)
{
+ ActionGroup group(*this);
+
RemoveSizes(Pkg);
RemoveStates(Pkg);
@@ -767,9 +983,11 @@ void pkgDepCache::SetReInstall(PkgIterator const &Pkg,bool To)
/* */
void pkgDepCache::SetCandidateVersion(VerIterator TargetVer)
{
+ ActionGroup group(*this);
+
pkgCache::PkgIterator Pkg = TargetVer.ParentPkg();
StateCache &P = PkgState[Pkg->ID];
-
+
RemoveSizes(Pkg);
RemoveStates(Pkg);
@@ -782,6 +1000,18 @@ void pkgDepCache::SetCandidateVersion(VerIterator TargetVer)
Update(Pkg);
AddSizes(Pkg);
}
+
+void pkgDepCache::MarkAuto(const PkgIterator &Pkg, bool Auto)
+{
+ StateCache &state = PkgState[Pkg->ID];
+
+ ActionGroup group(*this);
+
+ if(Auto)
+ state.Flags |= Flag::Auto;
+ else
+ state.Flags &= ~Flag::Auto;
+}
/*}}}*/
// StateCache::Update - Compute the various static display things /*{{{*/
// ---------------------------------------------------------------------
@@ -869,6 +1099,240 @@ pkgCache::VerIterator pkgDepCache::Policy::GetCandidateVer(PkgIterator Pkg)
/* */
bool pkgDepCache::Policy::IsImportantDep(DepIterator Dep)
{
- return Dep.IsCritical();
+ if(Dep.IsCritical())
+ return true;
+ else if(Dep->Type == pkgCache::Dep::Recommends)
+ {
+ if ( _config->FindB("APT::Install-Recommends", false))
+ return true;
+ // we support a special mode to apply Install-Recommends only for
+ // certain sections
+ // FIXME: this is meant as a temporary solution until the
+ // recommends are cleaned up
+ string s = _config->Find("APT::Install-Recommends-Section","");
+ if(s.size() > 0)
+ {
+ const char *sec = Dep.TargetPkg().Section();
+ if (sec && strcmp(sec, s.c_str()) == 0)
+ return true;
+ }
+ }
+ else if(Dep->Type == pkgCache::Dep::Suggests)
+ return _config->FindB("APT::Install-Suggests", false);
+
+ return false;
}
/*}}}*/
+
+pkgDepCache::DefaultRootSetFunc::DefaultRootSetFunc()
+ : constructedSuccessfully(false)
+{
+ Configuration::Item const *Opts;
+ Opts = _config->Tree("APT::NeverAutoRemove");
+ if (Opts != 0 && Opts->Child != 0)
+ {
+ Opts = Opts->Child;
+ for (; Opts != 0; Opts = Opts->Next)
+ {
+ if (Opts->Value.empty() == true)
+ continue;
+
+ regex_t *p = new regex_t;
+ if(regcomp(p,Opts->Value.c_str(),
+ REG_EXTENDED | REG_ICASE | REG_NOSUB) != 0)
+ {
+ regfree(p);
+ delete p;
+ _error->Error("Regex compilation error for APT::NeverAutoRemove");
+ return;
+ }
+
+ rootSetRegexp.push_back(p);
+ }
+ }
+
+ constructedSuccessfully = true;
+}
+
+pkgDepCache::DefaultRootSetFunc::~DefaultRootSetFunc()
+{
+ for(unsigned int i = 0; i < rootSetRegexp.size(); i++)
+ {
+ regfree(rootSetRegexp[i]);
+ delete rootSetRegexp[i];
+ }
+}
+
+
+bool pkgDepCache::DefaultRootSetFunc::InRootSet(const pkgCache::PkgIterator &pkg)
+{
+ for(unsigned int i = 0; i < rootSetRegexp.size(); i++)
+ if (regexec(rootSetRegexp[i], pkg.Name(), 0, 0, 0) == 0)
+ return true;
+
+ return false;
+}
+
+pkgDepCache::InRootSetFunc *pkgDepCache::GetRootSetFunc()
+{
+ DefaultRootSetFunc *f = new DefaultRootSetFunc;
+ if(f->wasConstructedSuccessfully())
+ return f;
+ else
+ {
+ delete f;
+ return NULL;
+ }
+}
+
+bool pkgDepCache::MarkFollowsRecommends()
+{
+ return _config->FindB("APT::AutoRemove::RecommendsImportant", true);
+}
+
+bool pkgDepCache::MarkFollowsSuggests()
+{
+ return _config->FindB("APT::AutoRemove::SuggestsImportant", false);
+}
+
+// the main mark algorithm
+bool pkgDepCache::MarkRequired(InRootSetFunc &userFunc)
+{
+ bool follow_recommends;
+ bool follow_suggests;
+
+ // init the states
+ for(PkgIterator p = PkgBegin(); !p.end(); ++p)
+ {
+ PkgState[p->ID].Marked = false;
+ PkgState[p->ID].Garbage = false;
+
+ // debug output
+ if(_config->FindB("Debug::pkgAutoRemove",false)
+ && PkgState[p->ID].Flags & Flag::Auto)
+ std::clog << "AutoDep: " << p.Name() << std::endl;
+ }
+
+ // init vars
+ follow_recommends = MarkFollowsRecommends();
+ follow_suggests = MarkFollowsSuggests();
+
+
+
+ // do the mark part, this is the core bit of the algorithm
+ for(PkgIterator p = PkgBegin(); !p.end(); ++p)
+ {
+ if(!(PkgState[p->ID].Flags & Flag::Auto) ||
+ (p->Flags & Flag::Essential) ||
+ userFunc.InRootSet(p))
+
+ {
+ // the package is installed (and set to keep)
+ if(PkgState[p->ID].Keep() && !p.CurrentVer().end())
+ MarkPackage(p, p.CurrentVer(),
+ follow_recommends, follow_suggests);
+ // the package is to be installed
+ else if(PkgState[p->ID].Install())
+ MarkPackage(p, PkgState[p->ID].InstVerIter(*this),
+ follow_recommends, follow_suggests);
+ }
+ }
+
+ return true;
+}
+
+// mark a single package in Mark-and-Sweep
+void pkgDepCache::MarkPackage(const pkgCache::PkgIterator &pkg,
+ const pkgCache::VerIterator &ver,
+ bool follow_recommends,
+ bool follow_suggests)
+{
+ pkgDepCache::StateCache &state = PkgState[pkg->ID];
+ VerIterator candver = state.CandidateVerIter(*this);
+ VerIterator instver = state.InstVerIter(*this);
+
+#if 0
+ // If a package was garbage-collected but is now being marked, we
+ // should re-select it
+ // For cases when a pkg is set to upgrade and this triggers the
+ // removal of a no-longer used dependency. If the pkg is set to
+ // keep again later it will result in broken deps
+ if(state.Delete() && state.RemoveReason == Unused)
+ {
+ if(ver==candver)
+ mark_install(pkg, false, false, NULL);
+ else if(ver==pkg.CurrentVer())
+ MarkKeep(pkg, false, false);
+
+ instver=state.InstVerIter(*this);
+ }
+#endif
+
+ // Ignore versions other than the InstVer, and ignore packages
+ // that are already going to be removed or just left uninstalled.
+ if(!(ver == instver && !instver.end()))
+ return;
+
+ // if we are marked already we are done
+ if(state.Marked)
+ return;
+
+ //std::cout << "Setting Marked for: " << pkg.Name() << std::endl;
+ state.Marked=true;
+
+ if(!ver.end())
+ {
+ for(DepIterator d = ver.DependsList(); !d.end(); ++d)
+ {
+ if(d->Type == Dep::Depends ||
+ d->Type == Dep::PreDepends ||
+ (follow_recommends &&
+ d->Type == Dep::Recommends) ||
+ (follow_suggests &&
+ d->Type == Dep::Suggests))
+ {
+ // Try all versions of this package.
+ for(VerIterator V = d.TargetPkg().VersionList();
+ !V.end(); ++V)
+ {
+ if(_system->VS->CheckDep(V.VerStr(), d->CompareOp, d.TargetVer()))
+ {
+ MarkPackage(V.ParentPkg(), V,
+ follow_recommends, follow_suggests);
+ }
+ }
+ // Now try virtual packages
+ for(PrvIterator prv=d.TargetPkg().ProvidesList();
+ !prv.end(); ++prv)
+ {
+ if(_system->VS->CheckDep(prv.ProvideVersion(), d->CompareOp,
+ d.TargetVer()))
+ {
+ MarkPackage(prv.OwnerPkg(), prv.OwnerVer(),
+ follow_recommends, follow_suggests);
+ }
+ }
+ }
+ }
+ }
+}
+
+bool pkgDepCache::Sweep()
+{
+ // do the sweep
+ for(PkgIterator p=PkgBegin(); !p.end(); ++p)
+ {
+ StateCache &state=PkgState[p->ID];
+
+ // if it is not marked and it is installed, it's garbage
+ if(!state.Marked && (!p.CurrentVer().end() || state.Install()) &&
+ !state.Delete())
+ {
+ state.Garbage=true;
+ if(_config->FindB("Debug::pkgAutoRemove",false))
+ std::cout << "Garbage: " << p.Name() << std::endl;
+ }
+ }
+
+ return true;
+}
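
The MarkRequired/MarkPackage/Sweep trio above is a plain mark-and-sweep pass over the package graph: everything reachable from the root set (manually installed, essential, or matched by APT::NeverAutoRemove) gets Marked, and whatever stays unmarked but installed is flagged as Garbage. A minimal self-contained sketch of the same idea, independent of the apt data structures (package names, dependencies and the root set below are made up for illustration):

    #include <iostream>
    #include <string>
    #include <vector>

    struct Node {                        // stand-in for a package
       std::string name;
       bool in_root_set;                 // manually installed / essential
       bool marked;
       bool garbage;
       std::vector<int> deps;            // indices of hard dependencies
    };

    static void Mark(std::vector<Node> &pkgs, int i)
    {
       if (pkgs[i].marked)
          return;                        // already visited
       pkgs[i].marked = true;
       for (int d : pkgs[i].deps)        // recurse, like MarkPackage()
          Mark(pkgs, d);
    }

    int main()
    {
       std::vector<Node> pkgs = {
          {"editor", true,  false, false, {1}},  // root, pulls in libfoo
          {"libfoo", false, false, false, {}},   // auto-installed dep
          {"oldlib", false, false, false, {}},   // no longer referenced
       };
       for (unsigned i = 0; i < pkgs.size(); ++i) // MarkRequired()
          if (pkgs[i].in_root_set)
             Mark(pkgs, i);
       for (Node &p : pkgs)                       // Sweep()
          if (!p.marked)
             p.garbage = true;
       for (Node const &p : pkgs)
          std::cout << p.name << (p.garbage ? ": garbage" : ": keep") << "\n";
       return 0;
    }

The real implementation additionally follows Recommends/Suggests edges depending on APT::AutoRemove::RecommendsImportant and APT::AutoRemove::SuggestsImportant, and since manually installed packages are always part of the root set only auto-installed packages can end up as garbage.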
diff --git a/apt-pkg/depcache.h b/apt-pkg/depcache.h
index 6d51920e9..5cd5ea354 100644
--- a/apt-pkg/depcache.h
+++ b/apt-pkg/depcache.h
@@ -1,4 +1,4 @@
-// -*- mode: cpp; mode: fold -*-
+// -*- mode: c++; mode: fold -*-
// Description /*{{{*/
// $Id: depcache.h,v 1.14 2001/02/20 07:03:17 jgg Exp $
/* ######################################################################
@@ -45,9 +45,71 @@
#include <apt-pkg/pkgcache.h>
#include <apt-pkg/progress.h>
+#include <regex.h>
+
+#include <vector>
+
class pkgDepCache : protected pkgCache::Namespace
{
public:
+
+ /** \brief An arbitrary predicate on packages. */
+ class InRootSetFunc
+ {
+ public:
+ virtual bool InRootSet(const pkgCache::PkgIterator &pkg) {return false;};
+ virtual ~InRootSetFunc() {};
+ };
+
+ private:
+ /** \brief Mark a single package and all its unmarked important
+ * dependencies during mark-and-sweep.
+ *
+ * Recursively invokes itself to mark all dependencies of the
+ * package.
+ *
+ * \param pkg The package to mark.
+ *
+ * \param ver The version of the package that is to be marked.
+ *
+ * \param follow_recommends If \b true, recommendations of the
+ * package will be recursively marked.
+ *
+ * \param follow_suggests If \b true, suggestions of the package
+ * will be recursively marked.
+ */
+ void MarkPackage(const pkgCache::PkgIterator &pkg,
+ const pkgCache::VerIterator &ver,
+ bool follow_recommends,
+ bool follow_suggests);
+
+ /** \brief Update the Marked field of all packages.
+ *
+ * Each package's StateCache::Marked field will be set to \b true
+ * if and only if it can be reached from the root set. By
+ * default, the root set consists of the set of manually installed
+ * or essential packages, but it can be extended using the
+ * parameter #rootFunc.
+ *
+ * \param rootFunc A callback that can be used to add extra
+ * packages to the root set.
+ *
+ * \return \b false if an error occurred.
+ */
+ bool MarkRequired(InRootSetFunc &rootFunc);
+
+ /** \brief Set the StateCache::Garbage flag on all packages that
+ * should be removed.
+ *
+ * Packages that were not marked by the last call to #MarkRequired
+ * are tested to see whether they are actually garbage. If so,
+ * they are marked as such.
+ *
+ * \return \b false if an error occurred.
+ */
+ bool Sweep();
+
+ public:
// These flags are used in DepState
enum DepFlags {DepNow = (1 << 0),DepInstall = (1 << 1),DepCVer = (1 << 2),
@@ -63,6 +125,84 @@ class pkgDepCache : protected pkgCache::Namespace
enum VersionTypes {NowVersion, InstallVersion, CandidateVersion};
enum ModeList {ModeDelete = 0, ModeKeep = 1, ModeInstall = 2};
+
+ /** \brief Represents an active action group.
+ *
+ * An action group is a group of actions that are currently being
+ * performed. While an action group is active, certain routine
+ * clean-up actions that would normally be performed after every
+ * cache operation are delayed until the action group is
+ * completed. This is necessary primarily to avoid inefficiencies
+ * when modifying a large number of packages at once.
+ *
+ * This class represents an active action group. Creating an
+ * instance will create an action group; destroying one will
+ * destroy the corresponding action group.
+ *
+ * The following operations are suppressed by this class:
+ *
+ * - Keeping the Marked and Garbage flags up to date.
+ *
+ * \note This can be used in the future to easily accumulate
+ * atomic actions for undo or to display "what apt did anyway";
+ * e.g., change the counter of how many action groups are active
+ * to a std::set of pointers to them and use those to store
+ * information about what happened in a group inside the group itself.
+ */
+ class ActionGroup
+ {
+ pkgDepCache &cache;
+
+ bool released;
+
+ /** Action groups are noncopyable. */
+ ActionGroup(const ActionGroup &other);
+ public:
+ /** \brief Create a new ActionGroup.
+ *
+ * \param cache The cache that this ActionGroup should
+ * manipulate.
+ *
+ * As long as this object exists, no automatic cleanup
+ * operations will be undertaken.
+ */
+ ActionGroup(pkgDepCache &cache);
+
+ /** \brief Clean up the action group before it is destroyed.
+ *
+ * If it is destroyed later, no second cleanup will be run.
+ */
+ void release();
+
+ /** \brief Destroy the action group.
+ *
+ * If this is the last action group, the automatic cache
+ * cleanup operations will be undertaken.
+ */
+ ~ActionGroup();
+ };
+
+ /** \brief Returns \b true for packages matching a regular
+ * expression in APT::NeverAutoRemove.
+ */
+ class DefaultRootSetFunc : public InRootSetFunc
+ {
+ std::vector<regex_t *> rootSetRegexp;
+ bool constructedSuccessfully;
+
+ public:
+ DefaultRootSetFunc();
+ ~DefaultRootSetFunc();
+
+ /** \return \b true if the class initialized successfully, \b
+ * false otherwise. Used to avoid throwing an exception, since
+ * APT classes generally don't.
+ */
+ bool wasConstructedSuccessfully() const { return constructedSuccessfully; }
+
+ bool InRootSet(const pkgCache::PkgIterator &pkg);
+ };
+
struct StateCache
{
// Epoch stripped text versions of the two version fields
@@ -79,6 +219,17 @@ class pkgDepCache : protected pkgCache::Namespace
unsigned short Flags;
unsigned short iFlags; // Internal flags
+ /** \brief \b true if this package can be reached from the root set. */
+ bool Marked;
+
+ /** \brief \b true if this package is unused and should be removed.
+ *
+ * This differs from !#Marked, because it is possible that some
+ * unreachable packages will be protected from becoming
+ * garbage.
+ */
+ bool Garbage;
+
// Various tree indicators
signed char Status; // -1,0,1,2
unsigned char Mode; // ModeList
@@ -97,7 +248,9 @@ class pkgDepCache : protected pkgCache::Namespace
inline bool Downgrade() const {return Status < 0 && Mode == ModeInstall;};
inline bool Held() const {return Status != 0 && Keep();};
inline bool NowBroken() const {return (DepState & DepNowMin) != DepNowMin;};
+ inline bool NowPolicyBroken() const {return (DepState & DepNowPolicy) != DepNowPolicy;};
inline bool InstBroken() const {return (DepState & DepInstMin) != DepInstMin;};
+ inline bool InstPolicyBroken() const {return (DepState & DepInstPolicy) != DepInstPolicy;};
inline bool Install() const {return Mode == ModeInstall;};
inline VerIterator InstVerIter(pkgCache &Cache)
{return VerIterator(Cache,InstallVer);};
@@ -119,6 +272,14 @@ class pkgDepCache : protected pkgCache::Namespace
virtual ~Policy() {};
};
+
+ private:
+ /** The number of open "action groups"; certain post-action
+ * operations are suppressed if this number is > 0.
+ */
+ int group_level;
+
+ friend class ActionGroup;
protected:
@@ -133,6 +294,7 @@ class pkgDepCache : protected pkgCache::Namespace
unsigned long iDelCount;
unsigned long iKeepCount;
unsigned long iBrokenCount;
+ unsigned long iPolicyBrokenCount;
unsigned long iBadCount;
Policy *delLocalPolicy; // For memory clean up..
@@ -182,16 +344,69 @@ class pkgDepCache : protected pkgCache::Namespace
inline StateCache &operator [](PkgIterator const &I) {return PkgState[I->ID];};
inline unsigned char &operator [](DepIterator const &I) {return DepState[I->ID];};
- // Manipulators
- void MarkKeep(PkgIterator const &Pkg,bool Soft = false);
+ /** \return A function identifying packages in the root set other
+ * than manually installed packages and essential packages, or \b
+ * NULL if an error occurs.
+ *
+ * \todo Is this the best place for this function? Perhaps the
+ * settings for mark-and-sweep should be stored in a single
+ * external class?
+ */
+ virtual InRootSetFunc *GetRootSetFunc();
+
+ /** \return \b true if the garbage collector should follow recommendations.
+ */
+ virtual bool MarkFollowsRecommends();
+
+ /** \return \b true if the garbage collector should follow suggestions.
+ */
+ virtual bool MarkFollowsSuggests();
+
+ /** \brief Update the Marked and Garbage fields of all packages.
+ *
+ * This routine is implicitly invoked after all state manipulators
+ * and when an ActionGroup is destroyed. It invokes #MarkRequired
+ * and #Sweep to do its dirty work.
+ *
+ * \param rootFunc A predicate that returns \b true for packages
+ * that should be added to the root set.
+ */
+ bool MarkAndSweep(InRootSetFunc &rootFunc)
+ {
+ return MarkRequired(rootFunc) && Sweep();
+ }
+
+ bool MarkAndSweep()
+ {
+ std::auto_ptr<InRootSetFunc> f(GetRootSetFunc());
+ if(f.get() != NULL)
+ return MarkAndSweep(*f.get());
+ else
+ return false;
+ }
+
+ /** \name State Manipulators
+ */
+ // @{
+ void MarkKeep(PkgIterator const &Pkg, bool Soft = false,
+ bool FromUser = true);
void MarkDelete(PkgIterator const &Pkg,bool Purge = false);
void MarkInstall(PkgIterator const &Pkg,bool AutoInst = true,
- unsigned long Depth = 0);
+ unsigned long Depth = 0, bool FromUser = true,
+ bool ForceImportantDeps = false);
void SetReInstall(PkgIterator const &Pkg,bool To);
void SetCandidateVersion(VerIterator TargetVer);
+
+ /** Set the "is automatically installed" flag of Pkg. */
+ void MarkAuto(const PkgIterator &Pkg, bool Auto);
+ // @}
// This is for debuging
void Update(OpProgress *Prog = 0);
+
+ // read persistent states
+ bool readStateFile(OpProgress *prog);
+ bool writeStateFile(OpProgress *prog);
// Size queries
inline double UsrSize() {return iUsrSize;};
@@ -200,6 +415,7 @@ class pkgDepCache : protected pkgCache::Namespace
inline unsigned long KeepCount() {return iKeepCount;};
inline unsigned long InstCount() {return iInstCount;};
inline unsigned long BrokenCount() {return iBrokenCount;};
+ inline unsigned long PolicyBrokenCount() {return iPolicyBrokenCount;};
inline unsigned long BadCount() {return iBadCount;};
bool Init(OpProgress *Prog);
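
A rough usage sketch of the new public interface, assuming a front end that already holds an initialised pkgDepCache (how that cache is built is outside this patch; the helper name below is made up). Several marks are batched inside one ActionGroup, and the Garbage flags are inspected afterwards:

    #include <apt-pkg/depcache.h>
    #include <iostream>

    // Illustrative helper; Cache is assumed to be fully set up elsewhere.
    void InstallAndReport(pkgDepCache &Cache, pkgCache::PkgIterator Pkg)
    {
       {
          // Delay the routine clean-up while several marks are queued;
          // releasing the group runs it, including the mark-and-sweep pass.
          pkgDepCache::ActionGroup Group(Cache);

          // FromUser=true keeps the package out of the Auto set; the
          // dependencies pulled in by the recursion stay auto-installed.
          Cache.MarkInstall(Pkg, true /*AutoInst*/, 0 /*Depth*/, true /*FromUser*/);
       }

       // The flags can also be recomputed explicitly with the default
       // root set (manual + essential + APT::NeverAutoRemove matches).
       if (Cache.MarkAndSweep() == false)
          return;

       for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
          if (Cache[P].Garbage)
             std::cout << P.Name() << " is no longer needed" << std::endl;
    }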
diff --git a/apt-pkg/indexcopy.cc b/apt-pkg/indexcopy.cc
index 1f65062f7..c9dded134 100644
--- a/apt-pkg/indexcopy.cc
+++ b/apt-pkg/indexcopy.cc
@@ -32,6 +32,8 @@
using namespace std;
+
+
// IndexCopy::CopyPackages - Copy the package files from the CD /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -512,10 +514,10 @@ bool SourceCopy::RewriteEntry(FILE *Target,string File)
fputc('\n',Target);
return true;
}
-
-
/*}}}*/
-
+// SigVerify::Verify - Verify a files md5sum against its metaindex /*{{{*/
+// ---------------------------------------------------------------------
+/* */
bool SigVerify::Verify(string prefix, string file, indexRecords *MetaIndex)
{
const indexRecords::checkSum *Record = MetaIndex->Lookup(file);
@@ -670,3 +672,178 @@ bool SigVerify::CopyAndVerify(string CDROM,string Name,vector<string> &SigList,
return true;
}
+
+
+bool TranslationsCopy::CopyTranslations(string CDROM,string Name,vector<string> &List,
+ pkgCdromStatus *log)
+{
+ OpProgress *Progress = NULL;
+ if (List.size() == 0)
+ return true;
+
+ if(log)
+ Progress = log->GetOpProgress();
+
+ bool Debug = _config->FindB("Debug::aptcdrom",false);
+
+ // Prepare the progress indicator
+ unsigned long TotalSize = 0;
+ for (vector<string>::iterator I = List.begin(); I != List.end(); I++)
+ {
+ struct stat Buf;
+ if (stat(string(*I).c_str(),&Buf) != 0 &&
+ stat(string(*I + ".gz").c_str(),&Buf) != 0)
+ return _error->Errno("stat","Stat failed for %s",
+ string(*I).c_str());
+ TotalSize += Buf.st_size;
+ }
+
+ unsigned long CurrentSize = 0;
+ unsigned int NotFound = 0;
+ unsigned int WrongSize = 0;
+ unsigned int Packages = 0;
+ for (vector<string>::iterator I = List.begin(); I != List.end(); I++)
+ {
+ string OrigPath = string(*I,CDROM.length());
+ unsigned long FileSize = 0;
+
+ // Open the package file
+ FileFd Pkg;
+ if (FileExists(*I) == true)
+ {
+ Pkg.Open(*I,FileFd::ReadOnly);
+ FileSize = Pkg.Size();
+ }
+ else
+ {
+ FileFd From(*I + ".gz",FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ return false;
+ FileSize = From.Size();
+
+ // Get a temp file
+ FILE *tmp = tmpfile();
+ if (tmp == 0)
+ return _error->Errno("tmpfile","Unable to create a tmp file");
+ Pkg.Fd(dup(fileno(tmp)));
+ fclose(tmp);
+
+ // Fork gzip
+ pid_t Process = fork();
+ if (Process < 0)
+ return _error->Errno("fork","Couldn't fork gzip");
+
+ // The child
+ if (Process == 0)
+ {
+ dup2(From.Fd(),STDIN_FILENO);
+ dup2(Pkg.Fd(),STDOUT_FILENO);
+ SetCloseExec(STDIN_FILENO,false);
+ SetCloseExec(STDOUT_FILENO,false);
+
+ const char *Args[3];
+ string Tmp = _config->Find("Dir::bin::gzip","gzip");
+ Args[0] = Tmp.c_str();
+ Args[1] = "-d";
+ Args[2] = 0;
+ execvp(Args[0],(char **)Args);
+ exit(100);
+ }
+
+ // Wait for gzip to finish
+ if (ExecWait(Process,_config->Find("Dir::bin::gzip","gzip").c_str(),false) == false)
+ return _error->Error("gzip failed, perhaps the disk is full.");
+
+ Pkg.Seek(0);
+ }
+ pkgTagFile Parser(&Pkg);
+ if (_error->PendingError() == true)
+ return false;
+
+ // Open the output file
+ char S[400];
+ snprintf(S,sizeof(S),"cdrom:[%s]/%s",Name.c_str(),
+ (*I).c_str() + CDROM.length());
+ string TargetF = _config->FindDir("Dir::State::lists") + "partial/";
+ TargetF += URItoFileName(S);
+ if (_config->FindB("APT::CDROM::NoAct",false) == true)
+ TargetF = "/dev/null";
+ FileFd Target(TargetF,FileFd::WriteEmpty);
+ FILE *TargetFl = fdopen(dup(Target.Fd()),"w");
+ if (_error->PendingError() == true)
+ return false;
+ if (TargetFl == 0)
+ return _error->Errno("fdopen","Failed to reopen fd");
+
+ // Setup the progress meter
+ if(Progress)
+ Progress->OverallProgress(CurrentSize,TotalSize,FileSize,
+ string("Reading Translation Indexes"));
+
+ // Parse
+ if(Progress)
+ Progress->SubProgress(Pkg.Size());
+ pkgTagSection Section;
+ this->Section = &Section;
+ string Prefix;
+ unsigned long Hits = 0;
+ unsigned long Chop = 0;
+ while (Parser.Step(Section) == true)
+ {
+ if(Progress)
+ Progress->Progress(Parser.Offset());
+
+ const char *Start;
+ const char *Stop;
+ Section.GetSection(Start,Stop);
+ fwrite(Start,Stop-Start, 1, TargetFl);
+ fputc('\n',TargetFl);
+
+ Packages++;
+ Hits++;
+ }
+ fclose(TargetFl);
+
+ if (Debug == true)
+ cout << " Processed by using Prefix '" << Prefix << "' and chop " << Chop << endl;
+
+ if (_config->FindB("APT::CDROM::NoAct",false) == false)
+ {
+ // Move out of the partial directory
+ Target.Close();
+ string FinalF = _config->FindDir("Dir::State::lists");
+ FinalF += URItoFileName(S);
+ if (rename(TargetF.c_str(),FinalF.c_str()) != 0)
+ return _error->Errno("rename","Failed to rename");
+ }
+
+
+ CurrentSize += FileSize;
+ }
+ if(Progress)
+ Progress->Done();
+
+ // Some stats
+ if(log) {
+ stringstream msg;
+ if(NotFound == 0 && WrongSize == 0)
+ ioprintf(msg, _("Wrote %i records.\n"), Packages);
+ else if (NotFound != 0 && WrongSize == 0)
+ ioprintf(msg, _("Wrote %i records with %i missing files.\n"),
+ Packages, NotFound);
+ else if (NotFound == 0 && WrongSize != 0)
+ ioprintf(msg, _("Wrote %i records with %i mismatched files\n"),
+ Packages, WrongSize);
+ if (NotFound != 0 && WrongSize != 0)
+ ioprintf(msg, _("Wrote %i records with %i missing files and %i mismatched files\n"), Packages, NotFound, WrongSize);
+ }
+
+ if (Packages == 0)
+ _error->Warning("No valid records were found.");
+
+ if (NotFound + WrongSize > 10)
+ _error->Warning("Alot of entries were discarded, something may be wrong.\n");
+
+
+ return true;
+}
diff --git a/apt-pkg/indexcopy.h b/apt-pkg/indexcopy.h
index 4dcb2b46d..7778ae595 100644
--- a/apt-pkg/indexcopy.h
+++ b/apt-pkg/indexcopy.h
@@ -70,6 +70,17 @@ class SourceCopy : public IndexCopy
public:
};
+class TranslationsCopy
+{
+ protected:
+ pkgTagSection *Section;
+
+ public:
+ bool CopyTranslations(string CDROM,string Name,vector<string> &List,
+ pkgCdromStatus *log);
+};
+
+
class SigVerify
{
bool Verify(string prefix,string file, indexRecords *records);
@@ -81,4 +92,6 @@ class SigVerify
vector<string> PkgList,vector<string> SrcList);
};
+
+
#endif
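
A hedged sketch of how the new class is meant to be driven; the actual caller lives in apt-pkg/cdrom.cc (touched in the diffstat above), and the variable names here are illustrative. CDROM is the mount point, Name the disc label and TransList the Translation-* files found on the disc:

    #include <apt-pkg/indexcopy.h>
    #include <apt-pkg/cdrom.h>
    #include <string>
    #include <vector>

    bool CopyDiscTranslations(std::string CDROM, std::string Name,
                              std::vector<std::string> &TransList,
                              pkgCdromStatus *log)
    {
       TranslationsCopy Trans;
       // Decompresses .gz files if necessary and drops the records into
       // Dir::State::lists, just like the Packages/Sources copies do.
       return Trans.CopyTranslations(CDROM, Name, TransList, log);
    }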
diff --git a/apt-pkg/indexfile.cc b/apt-pkg/indexfile.cc
index 49665161d..bb2210bf6 100644
--- a/apt-pkg/indexfile.cc
+++ b/apt-pkg/indexfile.cc
@@ -12,8 +12,11 @@
#pragma implementation "apt-pkg/indexfile.h"
#endif
+#include <apt-pkg/configuration.h>
#include <apt-pkg/indexfile.h>
#include <apt-pkg/error.h>
+
+#include <clocale>
/*}}}*/
// Global list of Item supported
@@ -67,3 +70,63 @@ string pkgIndexFile::SourceInfo(pkgSrcRecords::Parser const &Record,
return string();
}
/*}}}*/
+// IndexFile::TranslationsAvailable - Check whether Translations will be used /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgIndexFile::TranslationsAvailable()
+{
+ const string Translation = _config->Find("APT::Acquire::Translation");
+
+ if (Translation.compare("none") != 0)
+ return CheckLanguageCode(LanguageCode().c_str());
+ else
+ return false;
+}
+ /*}}}*/
+// IndexFile::CheckLanguageCode - Check the Language Code /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+/* common cases: de_DE, de_DE@euro, de_DE.UTF-8, de_DE.UTF-8@euro,
+ de_DE.ISO8859-1, tig_ER
+ more in /etc/gdm/locale.conf
+*/
+
+bool pkgIndexFile::CheckLanguageCode(const char *Lang)
+{
+ if (strlen(Lang) == 2 || (strlen(Lang) == 5 && Lang[2] == '_'))
+ return true;
+
+ if (strcmp(Lang,"C") != 0)
+ _error->Warning("Wrong language code %s", Lang);
+
+ return false;
+}
+ /*}}}*/
+// IndexFile::LanguageCode - Return the Language Code /*{{{*/
+// ---------------------------------------------------------------------
+/* return the language code */
+string pkgIndexFile::LanguageCode()
+{
+ const string Translation = _config->Find("APT::Acquire::Translation");
+
+ if (Translation.compare("environment") == 0)
+ {
+ string lang = std::setlocale(LC_MESSAGES,NULL);
+
+ // list of the language codes that need the country code as well
+ // to get the right translation file
+ // (like pt_BR, pt_PT, sv_SE, zh_*, en_*)
+ char *need_full_langcode[] = { "pt","sv","zh","en", NULL };
+ for(char **s = need_full_langcode;*s != NULL; s++)
+ if(lang.find(*s) == 0)
+ return lang.substr(0,5);
+
+ if(lang.size() > 2)
+ return lang.substr(0,2);
+ else
+ return lang;
+ }
+ else
+ return Translation;
+}
+ /*}}}*/
diff --git a/apt-pkg/indexfile.h b/apt-pkg/indexfile.h
index 61049f4bd..d5d1cf57a 100644
--- a/apt-pkg/indexfile.h
+++ b/apt-pkg/indexfile.h
@@ -5,10 +5,11 @@
Index File - Abstraction for an index of archive/source file.
- There are 3 primary sorts of index files, all represented by this
+ There are 4 primary sorts of index files, all represented by this
class:
Binary index files
+ Binary translation files
Binary index files describing the local system
Source index files
@@ -80,6 +81,10 @@ class pkgIndexFile
virtual bool MergeFileProvides(pkgCacheGenerator &/*Gen*/,OpProgress &/*Prog*/) const {return true;};
virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const;
+ static bool TranslationsAvailable();
+ static bool CheckLanguageCode(const char *Lang);
+ static string LanguageCode();
+
bool IsTrusted() const { return Trusted; };
pkgIndexFile(bool Trusted): Trusted(Trusted) {};
diff --git a/apt-pkg/init.cc b/apt-pkg/init.cc
index b47378d4a..579a19ab9 100644
--- a/apt-pkg/init.cc
+++ b/apt-pkg/init.cc
@@ -40,6 +40,8 @@ bool pkgInitConfig(Configuration &Cnf)
else
Cnf.Set("APT::Architecture",COMMON_OS "-" COMMON_CPU);
Cnf.Set("APT::Build-Essential::", "build-essential");
+ Cnf.Set("APT::Install-Recommends", false);
+ Cnf.Set("APT::Install-Suggests", false);
Cnf.Set("Dir","/");
// State
@@ -102,6 +104,9 @@ bool pkgInitConfig(Configuration &Cnf)
bindtextdomain(textdomain(0),Cnf.FindDir("Dir::Locale").c_str());
}
#endif
+
+ // Translation
+ Cnf.Set("APT::Acquire::Translation", "environment");
return true;
}
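
The defaults registered here can be overridden from apt.conf, with -o on the command line, or programmatically; they are ordinary Configuration entries. A small sketch, assuming libapt-pkg is initialised the usual way:

    #include <apt-pkg/configuration.h>
    #include <apt-pkg/init.h>
    #include <iostream>

    int main()
    {
       pkgInitConfig(*_config);   // installs the defaults added above

       // Turn recommends handling on for this run only (illustrative).
       _config->Set("APT::Install-Recommends", true);

       std::cout << "Install-Recommends: "
                 << _config->FindB("APT::Install-Recommends", false) << "\n"
                 << "Translation source: "
                 << _config->Find("APT::Acquire::Translation") << "\n";
       return 0;
    }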
diff --git a/apt-pkg/init.h b/apt-pkg/init.h
index 8255b406a..c6457cbca 100644
--- a/apt-pkg/init.h
+++ b/apt-pkg/init.h
@@ -17,8 +17,8 @@
#include <apt-pkg/pkgsystem.h>
// See the makefile
-#define APT_PKG_MAJOR 3
-#define APT_PKG_MINOR 11
+#define APT_PKG_MAJOR 4
+#define APT_PKG_MINOR 2
#define APT_PKG_RELEASE 0
extern const char *pkgVersion;
diff --git a/apt-pkg/makefile b/apt-pkg/makefile
index 7e5feae53..59df6c0ef 100644
--- a/apt-pkg/makefile
+++ b/apt-pkg/makefile
@@ -13,7 +13,7 @@ include ../buildlib/defaults.mak
# methods/makefile - FIXME
LIBRARY=apt-pkg
LIBEXT=$(GLIBC_VER)$(LIBSTDCPP_VER)
-MAJOR=3.11
+MAJOR=4.2
MINOR=0
SLIBS=$(PTHREADLIB) $(INTLLIBS)
APT_DOMAIN:=libapt-pkg$(MAJOR)
diff --git a/apt-pkg/packagemanager.cc b/apt-pkg/packagemanager.cc
index 4b3dd8be2..b0dd43629 100644
--- a/apt-pkg/packagemanager.cc
+++ b/apt-pkg/packagemanager.cc
@@ -106,7 +106,7 @@ bool pkgPackageManager::FixMissing()
// Okay, this file is missing and we need it. Mark it for keep
Bad = true;
- Cache.MarkKeep(I);
+ Cache.MarkKeep(I, false, false);
}
// We have to empty the list otherwise it will not have the new changes
@@ -631,14 +631,11 @@ pkgPackageManager::OrderResult pkgPackageManager::OrderInstall()
// ---------------------------------------------------------------------
/* This uses the filenames in FileNames and the information in the
DepCache to perform the installation of packages.*/
-pkgPackageManager::OrderResult pkgPackageManager::DoInstall(int status_fd)
+pkgPackageManager::OrderResult pkgPackageManager::DoInstall(int statusFd)
{
- OrderResult Res = OrderInstall();
- if(Debug)
- std::clog << "OrderInstall() returned: " << Res << std::endl;
- if (Res != Failed)
- if (Go(status_fd) == false)
- return Failed;
- return Res;
+ if(DoInstallPreFork() == Failed)
+ return Failed;
+
+ return DoInstallPostFork(statusFd);
}
/*}}}*/
diff --git a/apt-pkg/packagemanager.h b/apt-pkg/packagemanager.h
index f64637d03..48f53576c 100644
--- a/apt-pkg/packagemanager.h
+++ b/apt-pkg/packagemanager.h
@@ -28,7 +28,9 @@
#endif
#include <string>
+#include <iostream>
#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/depcache.h>
using std::string;
@@ -70,13 +72,39 @@ class pkgPackageManager : protected pkgCache::Namespace
virtual bool Remove(PkgIterator /*Pkg*/,bool /*Purge*/=false) {return false;};
virtual bool Go(int statusFd=-1) {return true;};
virtual void Reset() {};
-
+
+ // the result of the operation
+ OrderResult Res;
+
public:
// Main action members
bool GetArchives(pkgAcquire *Owner,pkgSourceList *Sources,
pkgRecords *Recs);
- OrderResult DoInstall(int statusFd=-1);
+
+ // Do the installation
+ OrderResult DoInstall(int statusFd=-1);
+
+ // stuff that needs to be done before the fork() of a library that
+ // uses apt
+ OrderResult DoInstallPreFork() {
+ Res = OrderInstall();
+ return Res;
+ };
+
+ // stuff that needs to be done after the fork
+ OrderResult DoInstallPostFork(int statusFd=-1) {
+ bool goResult = Go(statusFd);
+ if(goResult == false)
+ return Failed;
+
+ // if all was fine update the state file
+ if(Res == Completed) {
+ Cache.writeStateFile(NULL);
+ }
+ return Res;
+ };
+
bool FixMissing();
pkgPackageManager(pkgDepCache *Cache);
diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc
index 9926befe9..162ab4f27 100644
--- a/apt-pkg/pkgcache.cc
+++ b/apt-pkg/pkgcache.cc
@@ -26,6 +26,7 @@
#endif
#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/indexfile.h>
#include <apt-pkg/version.h>
#include <apt-pkg/error.h>
#include <apt-pkg/strutl.h>
@@ -43,6 +44,7 @@
using std::string;
+
// Cache::Header::Header - Constructor /*{{{*/
// ---------------------------------------------------------------------
/* Simply initialize the header */
@@ -52,7 +54,7 @@ pkgCache::Header::Header()
/* Whenever the structures change the major version should be bumped,
whenever the generator changes the minor version should be bumped. */
- MajorVersion = 4;
+ MajorVersion = 5;
MinorVersion = 0;
Dirty = false;
@@ -60,17 +62,22 @@ pkgCache::Header::Header()
PackageSz = sizeof(pkgCache::Package);
PackageFileSz = sizeof(pkgCache::PackageFile);
VersionSz = sizeof(pkgCache::Version);
+ DescriptionSz = sizeof(pkgCache::Description);
DependencySz = sizeof(pkgCache::Dependency);
ProvidesSz = sizeof(pkgCache::Provides);
VerFileSz = sizeof(pkgCache::VerFile);
+ DescFileSz = sizeof(pkgCache::DescFile);
PackageCount = 0;
VersionCount = 0;
+ DescriptionCount = 0;
DependsCount = 0;
PackageFileCount = 0;
VerFileCount = 0;
+ DescFileCount = 0;
ProvidesCount = 0;
MaxVerFileSize = 0;
+ MaxDescFileSize = 0;
FileList = 0;
StringList = 0;
@@ -89,8 +96,10 @@ bool pkgCache::Header::CheckSizes(Header &Against) const
PackageSz == Against.PackageSz &&
PackageFileSz == Against.PackageFileSz &&
VersionSz == Against.VersionSz &&
+ DescriptionSz == Against.DescriptionSz &&
DependencySz == Against.DependencySz &&
VerFileSz == Against.VerFileSz &&
+ DescFileSz == Against.DescFileSz &&
ProvidesSz == Against.ProvidesSz)
return true;
return false;
@@ -115,8 +124,10 @@ bool pkgCache::ReMap()
HeaderP = (Header *)Map.Data();
PkgP = (Package *)Map.Data();
VerFileP = (VerFile *)Map.Data();
+ DescFileP = (DescFile *)Map.Data();
PkgFileP = (PackageFile *)Map.Data();
VerP = (Version *)Map.Data();
+ DescP = (Description *)Map.Data();
ProvideP = (Provides *)Map.Data();
DepP = (Dependency *)Map.Data();
StringItemP = (StringItem *)Map.Data();
@@ -235,11 +246,11 @@ const char *pkgCache::Priority(unsigned char Prio)
return 0;
}
/*}}}*/
-
// Bases for iterator classes /*{{{*/
void pkgCache::VerIterator::_dummy() {}
void pkgCache::DepIterator::_dummy() {}
void pkgCache::PrvIterator::_dummy() {}
+void pkgCache::DescIterator::_dummy() {}
/*}}}*/
// PkgIterator::operator ++ - Postfix incr /*{{{*/
// ---------------------------------------------------------------------
@@ -599,3 +610,20 @@ string pkgCache::PkgFileIterator::RelStr()
return Res;
}
/*}}}*/
+// VerIterator::TranslatedDescription - Return a DescIter for the locale /*{{{*/
+// ---------------------------------------------------------------------
+/* return a DescIter for the current locale or the default if none is
+ * found
+ */
+pkgCache::DescIterator pkgCache::VerIterator::TranslatedDescription() const
+{
+ pkgCache::DescIterator DescDefault = DescriptionList();
+ pkgCache::DescIterator Desc = DescDefault;
+ for (; Desc.end() == false; Desc++)
+ if (pkgIndexFile::LanguageCode() == Desc.LanguageCode())
+ break;
+ if (Desc.end() == true) Desc = DescDefault;
+ return Desc;
+};
+
+ /*}}}*/
diff --git a/apt-pkg/pkgcache.h b/apt-pkg/pkgcache.h
index 587d97534..c7a3172cc 100644
--- a/apt-pkg/pkgcache.h
+++ b/apt-pkg/pkgcache.h
@@ -38,24 +38,30 @@ class pkgCache
struct Package;
struct PackageFile;
struct Version;
+ struct Description;
struct Provides;
struct Dependency;
struct StringItem;
struct VerFile;
+ struct DescFile;
// Iterators
class PkgIterator;
class VerIterator;
+ class DescIterator;
class DepIterator;
class PrvIterator;
class PkgFileIterator;
class VerFileIterator;
+ class DescFileIterator;
friend class PkgIterator;
friend class VerIterator;
+ friend class DescIterator;
friend class DepIterator;
friend class PrvIterator;
friend class PkgFileIterator;
friend class VerFileIterator;
+ friend class DescFileIterator;
class Namespace;
@@ -98,8 +104,10 @@ class pkgCache
Header *HeaderP;
Package *PkgP;
VerFile *VerFileP;
+ DescFile *DescFileP;
PackageFile *PkgFileP;
Version *VerP;
+ Description *DescP;
Provides *ProvideP;
Dependency *DepP;
StringItem *StringItemP;
@@ -151,16 +159,20 @@ struct pkgCache::Header
unsigned short PackageSz;
unsigned short PackageFileSz;
unsigned short VersionSz;
+ unsigned short DescriptionSz;
unsigned short DependencySz;
unsigned short ProvidesSz;
unsigned short VerFileSz;
+ unsigned short DescFileSz;
// Structure counts
unsigned long PackageCount;
unsigned long VersionCount;
+ unsigned long DescriptionCount;
unsigned long DependsCount;
unsigned long PackageFileCount;
unsigned long VerFileCount;
+ unsigned long DescFileCount;
unsigned long ProvidesCount;
// Offsets
@@ -169,10 +181,11 @@ struct pkgCache::Header
map_ptrloc VerSysName; // StringTable
map_ptrloc Architecture; // StringTable
unsigned long MaxVerFileSize;
+ unsigned long MaxDescFileSize;
/* Allocation pools, there should be one of these for each structure
excluding the header */
- DynamicMMap::Pool Pools[7];
+ DynamicMMap::Pool Pools[8];
// Rapid package name lookup
map_ptrloc HashTable[2*1048];
@@ -193,7 +206,7 @@ struct pkgCache::Package
map_ptrloc NextPackage; // Package
map_ptrloc RevDepends; // Dependency
map_ptrloc ProvidesList; // Provides
-
+
// Install/Remove/Purge etc
unsigned char SelectedState; // What
unsigned char InstState; // Flags
@@ -232,6 +245,14 @@ struct pkgCache::VerFile
unsigned short Size;
};
+struct pkgCache::DescFile
+{
+ map_ptrloc File; // PackageFile
+ map_ptrloc NextFile; // PkgVerFile
+ map_ptrloc Offset; // File offset
+ unsigned short Size;
+};
+
struct pkgCache::Version
{
map_ptrloc VerStr; // Stringtable
@@ -241,6 +262,7 @@ struct pkgCache::Version
// Lists
map_ptrloc FileList; // VerFile
map_ptrloc NextVer; // Version
+ map_ptrloc DescriptionList; // Description
map_ptrloc DependsList; // Dependency
map_ptrloc ParentPkg; // Package
map_ptrloc ProvidesList; // Provides
@@ -252,6 +274,22 @@ struct pkgCache::Version
unsigned char Priority;
};
+struct pkgCache::Description
+{
+ // language_code stores the language code of the description translation.
+ // If the value has zero length the description is read from the Package
+ // file, otherwise the Translation-CODE files are used.
+ map_ptrloc language_code; // StringTable
+ map_ptrloc md5sum; // StringTable
+
+ // Linked list
+ map_ptrloc FileList; // DescFile
+ map_ptrloc NextDesc; // Description
+ map_ptrloc ParentPkg; // Package
+
+ unsigned short ID;
+};
+
struct pkgCache::Dependency
{
map_ptrloc Version; // Stringtable
@@ -299,11 +337,13 @@ class pkgCache::Namespace
typedef pkgCache::PkgIterator PkgIterator;
typedef pkgCache::VerIterator VerIterator;
+ typedef pkgCache::DescIterator DescIterator;
typedef pkgCache::DepIterator DepIterator;
typedef pkgCache::PrvIterator PrvIterator;
typedef pkgCache::PkgFileIterator PkgFileIterator;
typedef pkgCache::VerFileIterator VerFileIterator;
typedef pkgCache::Version Version;
+ typedef pkgCache::Description Description;
typedef pkgCache::Package Package;
typedef pkgCache::Header Header;
typedef pkgCache::Dep Dep;
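
The new layout mirrors the existing Version/VerFile split: every Version carries a linked list of Description records (one per language, keyed by md5sum), and every Description carries a list of DescFile entries pointing back into the PackageFile that provided it. A short sketch of walking those lists, assuming an opened cache; this is essentially what the apt-cache dump changes below print:

    #include <apt-pkg/pkgcache.h>
    #include <iostream>

    void DumpDescriptions(pkgCache::VerIterator V)
    {
       for (pkgCache::DescIterator D = V.DescriptionList(); D.end() == false; D++)
       {
          std::cout << "  lang='" << D.LanguageCode() << "' md5=" << D.md5() << "\n";
          for (pkgCache::DescFileIterator F = D.FileList(); F.end() == false; F++)
             std::cout << "    from " << F.File().FileName() << "\n";
       }
    }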
diff --git a/apt-pkg/pkgcachegen.cc b/apt-pkg/pkgcachegen.cc
index de854bee5..3f02725c1 100644
--- a/apt-pkg/pkgcachegen.cc
+++ b/apt-pkg/pkgcachegen.cc
@@ -26,6 +26,8 @@
#include <apt-pkg/sptr.h>
#include <apt-pkg/pkgsystem.h>
+#include <apt-pkg/tagfile.h>
+
#include <apti18n.h>
#include <vector>
@@ -125,16 +127,46 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
string Version = List.Version();
if (Version.empty() == true)
{
+ // we first process the package, then the descriptions
+ // (this has the bonus that we get an MMap error if we run out
+ // of MMap space)
if (List.UsePackage(Pkg,pkgCache::VerIterator(Cache)) == false)
return _error->Error(_("Error occurred while processing %s (UsePackage1)"),
PackageName.c_str());
+
+ // Find the right version to write the description
+ MD5SumValue CurMd5 = List.Description_md5();
+ pkgCache::VerIterator Ver = Pkg.VersionList();
+ map_ptrloc *LastVer = &Pkg->VersionList;
+
+ for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
+ {
+ pkgCache::DescIterator Desc = Ver.DescriptionList();
+ map_ptrloc *LastDesc = &Ver->DescriptionList;
+
+ for (; Desc.end() == false; LastDesc = &Desc->NextDesc, Desc++)
+ {
+
+ if (MD5SumValue(Desc.md5()) == CurMd5)
+ {
+ // Add new description
+ *LastDesc = NewDescription(Desc, List.DescriptionLanguage(), CurMd5, *LastDesc);
+ Desc->ParentPkg = Pkg.Index();
+
+ if (NewFileDesc(Desc,List) == false)
+ return _error->Error(_("Error occured while processing %s (NewFileDesc1)"),PackageName.c_str());
+ break;
+ }
+ }
+ }
+
continue;
}
pkgCache::VerIterator Ver = Pkg.VersionList();
- map_ptrloc *Last = &Pkg->VersionList;
+ map_ptrloc *LastVer = &Pkg->VersionList;
int Res = 1;
- for (; Ver.end() == false; Last = &Ver->NextVer, Ver++)
+ for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
{
Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
if (Res >= 0)
@@ -168,7 +200,7 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
// Skip to the end of the same version set.
if (Res == 0)
{
- for (; Ver.end() == false; Last = &Ver->NextVer, Ver++)
+ for (; Ver.end() == false; LastVer = &Ver->NextVer, Ver++)
{
Res = Cache.VS->CmpVersion(Version,Ver.VerStr());
if (Res != 0)
@@ -177,9 +209,10 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
}
// Add a new version
- *Last = NewVersion(Ver,Version,*Last);
+ *LastVer = NewVersion(Ver,Version,*LastVer);
Ver->ParentPkg = Pkg.Index();
Ver->Hash = Hash;
+
if (List.NewVersion(Ver) == false)
return _error->Error(_("Error occurred while processing %s (NewVersion1)"),
PackageName.c_str());
@@ -199,6 +232,21 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
FoundFileDeps |= List.HasFileDeps();
return true;
}
+
+ /* Record the Description data. Description data always exist in
+ Packages and Translation-* files. */
+ pkgCache::DescIterator Desc = Ver.DescriptionList();
+ map_ptrloc *LastDesc = &Ver->DescriptionList;
+
+ // Skip to the end of description set
+ for (; Desc.end() == false; LastDesc = &Desc->NextDesc, Desc++);
+
+ // Add new description
+ *LastDesc = NewDescription(Desc, List.DescriptionLanguage(), List.Description_md5(), *LastDesc);
+ Desc->ParentPkg = Pkg.Index();
+
+ if (NewFileDesc(Desc,List) == false)
+ return _error->Error(_("Error occured while processing %s (NewFileDesc2)"),PackageName.c_str());
}
FoundFileDeps |= List.HasFileDeps();
@@ -209,6 +257,9 @@ bool pkgCacheGenerator::MergeList(ListParser &List,
if (Cache.HeaderP->VersionCount >= (1ULL<<(sizeof(Cache.VerP->ID)*8))-1)
return _error->Error(_("Wow, you exceeded the number of versions "
"this APT is capable of."));
+ if (Cache.HeaderP->DescriptionCount >= (1ULL<<(sizeof(Cache.DescP->ID)*8))-1)
+ return _error->Error(_("Wow, you exceeded the number of descriptions "
+ "this APT is capable of."));
if (Cache.HeaderP->DependsCount >= (1ULL<<(sizeof(Cache.DepP->ID)*8))-1ULL)
return _error->Error(_("Wow, you exceeded the number of dependencies "
"this APT is capable of."));
@@ -271,7 +322,7 @@ bool pkgCacheGenerator::NewPackage(pkgCache::PkgIterator &Pkg,const string &Name
Pkg = Cache.FindPkg(Name);
if (Pkg.end() == false)
return true;
-
+
// Get a structure
unsigned long Package = Map.Allocate(sizeof(pkgCache::Package));
if (Package == 0)
@@ -349,6 +400,62 @@ unsigned long pkgCacheGenerator::NewVersion(pkgCache::VerIterator &Ver,
return Version;
}
/*}}}*/
+// CacheGenerator::NewFileDesc - Create a new File<->Desc association /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool pkgCacheGenerator::NewFileDesc(pkgCache::DescIterator &Desc,
+ ListParser &List)
+{
+ if (CurrentFile == 0)
+ return true;
+
+ // Get a structure
+ unsigned long DescFile = Map.Allocate(sizeof(pkgCache::DescFile));
+ if (DescFile == 0)
+ return 0;
+
+ pkgCache::DescFileIterator DF(Cache,Cache.DescFileP + DescFile);
+ DF->File = CurrentFile - Cache.PkgFileP;
+
+ // Link it to the end of the list
+ map_ptrloc *Last = &Desc->FileList;
+ for (pkgCache::DescFileIterator D = Desc.FileList(); D.end() == false; D++)
+ Last = &D->NextFile;
+
+ DF->NextFile = *Last;
+ *Last = DF.Index();
+
+ DF->Offset = List.Offset();
+ DF->Size = List.Size();
+ if (Cache.HeaderP->MaxDescFileSize < DF->Size)
+ Cache.HeaderP->MaxDescFileSize = DF->Size;
+ Cache.HeaderP->DescFileCount++;
+
+ return true;
+}
+ /*}}}*/
+// CacheGenerator::NewDescription - Create a new Description /*{{{*/
+// ---------------------------------------------------------------------
+/* This puts a description structure in the linked list */
+map_ptrloc pkgCacheGenerator::NewDescription(pkgCache::DescIterator &Desc,
+ const string &Lang, const MD5SumValue &md5sum,
+ map_ptrloc Next)
+{
+ // Get a structure
+ map_ptrloc Description = Map.Allocate(sizeof(pkgCache::Description));
+ if (Description == 0)
+ return 0;
+
+ // Fill it in
+ Desc = pkgCache::DescIterator(Cache,Cache.DescP + Description);
+ Desc->NextDesc = Next;
+ Desc->ID = Cache.HeaderP->DescriptionCount++;
+ Desc->language_code = Map.WriteString(Lang);
+ Desc->md5sum = Map.WriteString(md5sum.Value());
+
+ return Description;
+}
+ /*}}}*/
// ListParser::NewDepends - Create a dependency element /*{{{*/
// ---------------------------------------------------------------------
/* This creates a dependency element in the tree. It is linked to the
@@ -580,7 +687,7 @@ static bool CheckValidity(const string &CacheFile, FileIterator Start,
pkgCache::PkgFileIterator File = (*Start)->FindInCache(Cache);
if (File.end() == true)
return false;
-
+
Visited[File->ID] = true;
}
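
The generator attaches a Translation-* record to a version purely by the MD5 of its untranslated description, which is why ListParser grows the Description_md5() hook below. A toy illustration of that matching step using apt's MD5 helpers (both sample values are invented):

    #include <apt-pkg/md5.h>
    #include <iostream>
    #include <string>

    int main()
    {
       // Hash as it would be computed from a Packages stanza.
       std::string EnglishDesc = "A sample package\n Longer text follows here.\n";
       MD5Summation Sum;
       Sum.Add(EnglishDesc.c_str());
       MD5SumValue FromPackages = Sum.Result();

       // Hash as a Translation-de stanza would carry it in Description-md5.
       MD5SumValue FromTranslation("0123456789abcdef0123456789abcdef");

       // MergeList() walks the versions and attaches the translated
       // description to the one whose hash matches.
       if (FromPackages == FromTranslation)
          std::cout << "attach translation to this version\n";
       else
          std::cout << "hashes differ, keep looking\n";
       return 0;
    }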
diff --git a/apt-pkg/pkgcachegen.h b/apt-pkg/pkgcachegen.h
index 9a729eea4..fae1a60a6 100644
--- a/apt-pkg/pkgcachegen.h
+++ b/apt-pkg/pkgcachegen.h
@@ -24,6 +24,7 @@
#endif
#include <apt-pkg/pkgcache.h>
+#include <apt-pkg/md5.h>
class pkgSourceList;
class OpProgress;
@@ -55,7 +56,9 @@ class pkgCacheGenerator
bool NewPackage(pkgCache::PkgIterator &Pkg,const string &Name);
bool NewFileVer(pkgCache::VerIterator &Ver,ListParser &List);
+ bool NewFileDesc(pkgCache::DescIterator &Desc,ListParser &List);
unsigned long NewVersion(pkgCache::VerIterator &Ver,const string &VerStr,unsigned long Next);
+ map_ptrloc NewDescription(pkgCache::DescIterator &Desc,const string &Lang,const MD5SumValue &md5sum,map_ptrloc Next);
public:
@@ -108,6 +111,9 @@ class pkgCacheGenerator::ListParser
virtual string Package() = 0;
virtual string Version() = 0;
virtual bool NewVersion(pkgCache::VerIterator Ver) = 0;
+ virtual string Description() = 0;
+ virtual string DescriptionLanguage() = 0;
+ virtual MD5SumValue Description_md5() = 0;
virtual unsigned short VersionHash() = 0;
virtual bool UsePackage(pkgCache::PkgIterator Pkg,
pkgCache::VerIterator Ver) = 0;
diff --git a/apt-pkg/pkgrecords.cc b/apt-pkg/pkgrecords.cc
index 9c2655d6a..b22f3e73f 100644
--- a/apt-pkg/pkgrecords.cc
+++ b/apt-pkg/pkgrecords.cc
@@ -63,3 +63,12 @@ pkgRecords::Parser &pkgRecords::Lookup(pkgCache::VerFileIterator const &Ver)
return *Files[Ver.File()->ID];
}
/*}}}*/
+// Records::Lookup - Get a parser for the package description file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+pkgRecords::Parser &pkgRecords::Lookup(pkgCache::DescFileIterator const &Desc)
+{
+ Files[Desc.File()->ID]->Jump(Desc);
+ return *Files[Desc.File()->ID];
+}
+ /*}}}*/
diff --git a/apt-pkg/pkgrecords.h b/apt-pkg/pkgrecords.h
index 08f004414..31c444dbf 100644
--- a/apt-pkg/pkgrecords.h
+++ b/apt-pkg/pkgrecords.h
@@ -38,6 +38,7 @@ class pkgRecords
// Lookup function
Parser &Lookup(pkgCache::VerFileIterator const &Ver);
+ Parser &Lookup(pkgCache::DescFileIterator const &Desc);
// Construct destruct
pkgRecords(pkgCache &Cache);
@@ -49,6 +50,7 @@ class pkgRecords::Parser
protected:
virtual bool Jump(pkgCache::VerFileIterator const &Ver) = 0;
+ virtual bool Jump(pkgCache::DescFileIterator const &Desc) = 0;
public:
friend class pkgRecords;
diff --git a/buildlib/environment.mak.in b/buildlib/environment.mak.in
index f5ee539ac..2d28e1c67 100644
--- a/buildlib/environment.mak.in
+++ b/buildlib/environment.mak.in
@@ -28,6 +28,8 @@ INLINEDEPFLAG = -MD
DEBIANDOC_HTML = @DEBIANDOC_HTML@
DEBIANDOC_TEXT = @DEBIANDOC_TEXT@
+DOXYGEN = @DOXYGEN@
+
# SGML for the man pages
DOCBOOK2MAN := @DOCBOOK2MAN@
diff --git a/cmdline/apt-cache.cc b/cmdline/apt-cache.cc
index aea9ebeba..74fa71cba 100644
--- a/cmdline/apt-cache.cc
+++ b/cmdline/apt-cache.cc
@@ -71,6 +71,12 @@ void LocalitySort(pkgCache::VerFile **begin,
{
qsort(begin,Count,Size,LocalityCompare);
}
+
+void LocalitySort(pkgCache::DescFile **begin,
+ unsigned long Count,size_t Size)
+{
+ qsort(begin,Count,Size,LocalityCompare);
+}
/*}}}*/
// UnMet - Show unmet dependencies /*{{{*/
// ---------------------------------------------------------------------
@@ -182,7 +188,14 @@ bool DumpPackage(CommandLine &CmdL)
{
cout << Cur.VerStr();
for (pkgCache::VerFileIterator Vf = Cur.FileList(); Vf.end() == false; Vf++)
- cout << "(" << Vf.File().FileName() << ")";
+ cout << " (" << Vf.File().FileName() << ")";
+ cout << endl;
+ for (pkgCache::DescIterator D = Cur.DescriptionList(); D.end() == false; D++)
+ {
+ cout << " Description Language: " << D.LanguageCode() << endl
+ << " File: " << D.FileList().File().FileName() << endl
+ << " MD5: " << D.md5() << endl;
+ }
cout << endl;
}
@@ -277,11 +290,15 @@ bool Stats(CommandLine &Cmd)
cout << _("Total distinct versions: ") << Cache.Head().VersionCount << " (" <<
SizeToStr(Cache.Head().VersionCount*Cache.Head().VersionSz) << ')' << endl;
+ cout << _("Total Distinct Descriptions: ") << Cache.Head().DescriptionCount << " (" <<
+ SizeToStr(Cache.Head().DescriptionCount*Cache.Head().DescriptionSz) << ')' << endl;
cout << _("Total dependencies: ") << Cache.Head().DependsCount << " (" <<
SizeToStr(Cache.Head().DependsCount*Cache.Head().DependencySz) << ')' << endl;
cout << _("Total ver/file relations: ") << Cache.Head().VerFileCount << " (" <<
SizeToStr(Cache.Head().VerFileCount*Cache.Head().VerFileSz) << ')' << endl;
+ cout << _("Total Desc/File relations: ") << Cache.Head().DescFileCount << " (" <<
+ SizeToStr(Cache.Head().DescFileCount*Cache.Head().DescFileSz) << ')' << endl;
cout << _("Total Provides mappings: ") << Cache.Head().ProvidesCount << " (" <<
SizeToStr(Cache.Head().ProvidesCount*Cache.Head().ProvidesSz) << ')' << endl;
@@ -344,6 +361,12 @@ bool Dump(CommandLine &Cmd)
for (pkgCache::DepIterator D = V.DependsList(); D.end() == false; D++)
cout << " Depends: " << D.TargetPkg().Name() << ' ' <<
DeNull(D.TargetVer()) << endl;
+ for (pkgCache::DescIterator D = V.DescriptionList(); D.end() == false; D++)
+ {
+ cout << " Description Language: " << D.LanguageCode() << endl
+ << " File: " << D.FileList().File().FileName() << endl
+ << " MD5: " << D.md5() << endl;
+ }
}
}
@@ -1192,17 +1215,50 @@ bool DisplayRecord(pkgCache::VerIterator V)
if (_error->PendingError() == true)
return false;
- // Read the record and then write it out again.
+ // Read the record
unsigned char *Buffer = new unsigned char[GCache->HeaderP->MaxVerFileSize+1];
Buffer[V.FileList()->Size] = '\n';
if (PkgF.Seek(V.FileList()->Offset) == false ||
- PkgF.Read(Buffer,V.FileList()->Size) == false ||
- fwrite(Buffer,1,V.FileList()->Size+1,stdout) < (size_t)(V.FileList()->Size+1))
+ PkgF.Read(Buffer,V.FileList()->Size) == false)
{
delete [] Buffer;
return false;
}
-
+
+ // Get a pointer to start of Description field
+ const unsigned char *DescP = (unsigned char*)strstr((char*)Buffer, "Description:");
+
+ // Write all but Description
+ if (fwrite(Buffer,1,DescP - Buffer,stdout) < (size_t)(DescP - Buffer))
+ {
+ delete [] Buffer;
+ return false;
+ }
+
+ // Show the right description
+ pkgRecords Recs(*GCache);
+ pkgCache::DescIterator Desc = V.TranslatedDescription();
+ pkgRecords::Parser &P = Recs.Lookup(Desc.FileList());
+ cout << "Description" << ( (strcmp(Desc.LanguageCode(),"") != 0) ? "-" : "" ) << Desc.LanguageCode() << ": " << P.LongDesc();
+
+ // Find the first field after the description (if there is any)
+ for(DescP++;DescP != &Buffer[V.FileList()->Size];DescP++)
+ {
+ if(*DescP == '\n' && *(DescP+1) != ' ')
+ {
+ // write the rest of the buffer
+ const unsigned char *end=&Buffer[V.FileList()->Size];
+ if (fwrite(DescP,1,end-DescP,stdout) < (size_t)(end-DescP))
+ {
+ delete [] Buffer;
+ return false;
+ }
+
+ break;
+ }
+ }
+ // write a final newline (after the description)
+ cout<<endl;
delete [] Buffer;
return true;
@@ -1211,9 +1267,9 @@ bool DisplayRecord(pkgCache::VerIterator V)
// Search - Perform a search /*{{{*/
// ---------------------------------------------------------------------
/* This searches the package names and package descriptions for a pattern */
-struct ExVerFile
+struct ExDescFile
{
- pkgCache::VerFile *Vf;
+ pkgCache::DescFile *Df;
bool NameMatch;
};
@@ -1253,35 +1309,35 @@ bool Search(CommandLine &CmdL)
return false;
}
- ExVerFile *VFList = new ExVerFile[Cache.HeaderP->PackageCount+1];
- memset(VFList,0,sizeof(*VFList)*Cache.HeaderP->PackageCount+1);
+ ExDescFile *DFList = new ExDescFile[Cache.HeaderP->PackageCount+1];
+ memset(DFList,0,sizeof(*DFList)*Cache.HeaderP->PackageCount+1);
// Map versions that we want to write out onto the VerList array.
for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
{
- VFList[P->ID].NameMatch = NumPatterns != 0;
+ DFList[P->ID].NameMatch = NumPatterns != 0;
for (unsigned I = 0; I != NumPatterns; I++)
{
if (regexec(&Patterns[I],P.Name(),0,0,0) == 0)
- VFList[P->ID].NameMatch &= true;
+ DFList[P->ID].NameMatch &= true;
else
- VFList[P->ID].NameMatch = false;
+ DFList[P->ID].NameMatch = false;
}
// Doing names only, drop any that don't match..
- if (NamesOnly == true && VFList[P->ID].NameMatch == false)
+ if (NamesOnly == true && DFList[P->ID].NameMatch == false)
continue;
// Find the proper version to use.
pkgCache::VerIterator V = Plcy.GetCandidateVer(P);
if (V.end() == false)
- VFList[P->ID].Vf = V.FileList();
+ DFList[P->ID].Df = V.DescriptionList().FileList();
}
// Include all the packages that provide matching names too
for (pkgCache::PkgIterator P = Cache.PkgBegin(); P.end() == false; P++)
{
- if (VFList[P->ID].NameMatch == false)
+ if (DFList[P->ID].NameMatch == false)
continue;
for (pkgCache::PrvIterator Prv = P.ProvidesList() ; Prv.end() == false; Prv++)
@@ -1289,18 +1345,18 @@ bool Search(CommandLine &CmdL)
pkgCache::VerIterator V = Plcy.GetCandidateVer(Prv.OwnerPkg());
if (V.end() == false)
{
- VFList[Prv.OwnerPkg()->ID].Vf = V.FileList();
- VFList[Prv.OwnerPkg()->ID].NameMatch = true;
+ DFList[Prv.OwnerPkg()->ID].Df = V.DescriptionList().FileList();
+ DFList[Prv.OwnerPkg()->ID].NameMatch = true;
}
}
}
-
- LocalitySort(&VFList->Vf,Cache.HeaderP->PackageCount,sizeof(*VFList));
+
+ LocalitySort(&DFList->Df,Cache.HeaderP->PackageCount,sizeof(*DFList));
// Iterate over all the version records and check them
- for (ExVerFile *J = VFList; J->Vf != 0; J++)
+ for (ExDescFile *J = DFList; J->Df != 0; J++)
{
- pkgRecords::Parser &P = Recs.Lookup(pkgCache::VerFileIterator(Cache,J->Vf));
+ pkgRecords::Parser &P = Recs.Lookup(pkgCache::DescFileIterator(Cache,J->Df));
bool Match = true;
if (J->NameMatch == false)
@@ -1331,7 +1387,7 @@ bool Search(CommandLine &CmdL)
}
}
- delete [] VFList;
+ delete [] DFList;
for (unsigned I = 0; I != NumPatterns; I++)
regfree(&Patterns[I]);
if (ferror(stdout))
diff --git a/cmdline/apt-get.cc b/cmdline/apt-get.cc
index 64882e3e8..4dfb3325c 100644
--- a/cmdline/apt-get.cc
+++ b/cmdline/apt-get.cc
@@ -60,6 +60,7 @@
#include <errno.h>
#include <regex.h>
#include <sys/wait.h>
+#include <sstream>
/*}}}*/
using namespace std;
@@ -628,6 +629,8 @@ void CacheFile::Sort()
and verifies that the system is OK. */
bool CacheFile::CheckDeps(bool AllowBroken)
{
+ bool FixBroken = _config->FindB("APT::Get::Fix-Broken",false);
+
if (_error->PendingError() == true)
return false;
@@ -639,12 +642,24 @@ bool CacheFile::CheckDeps(bool AllowBroken)
if (pkgApplyStatus(*DCache) == false)
return false;
+ if (_config->FindB("APT::Get::Fix-Policy-Broken",false) == true)
+ {
+ FixBroken = true;
+ if ((DCache->PolicyBrokenCount() > 0))
+ {
+ // upgrade all policy-broken packages with ForceImportantDeps=True
+ for (pkgCache::PkgIterator I = Cache->PkgBegin(); !I.end(); I++)
+ if ((*DCache)[I].NowPolicyBroken() == true)
+ DCache->MarkInstall(I,true,0, false, true);
+ }
+ }
+
// Nothing is broken
if (DCache->BrokenCount() == 0 || AllowBroken == true)
return true;
// Attempt to fix broken things
- if (_config->FindB("APT::Get::Fix-Broken",false) == true)
+ if (FixBroken == true)
{
c1out << _("Correcting dependencies...") << flush;
if (pkgFixBroken(*DCache) == false || DCache->BrokenCount() != 0)
@@ -995,7 +1010,7 @@ bool InstallPackages(CacheFile &Cache,bool ShwKept,bool Ask = true,
cerr << _("Unable to correct missing packages.") << endl;
return _error->Error(_("Aborting install."));
}
-
+
_system->UnLock();
int status_fd = _config->FindI("APT::Status-Fd",-1);
pkgPackageManager::OrderResult Res = PM->DoInstall(status_fd);
@@ -1145,9 +1160,11 @@ bool TryToInstall(pkgCache::PkgIterator Pkg,pkgDepCache &Cache,
else
ExpectedInst++;
- // Install it with autoinstalling enabled.
- if (State.InstBroken() == true && BrokenFix == false)
+ // Install it with autoinstalling enabled (if the install state does
+ // not satisfy the minimal required deps or the policy)
+ if ((State.InstBroken() == true || State.InstPolicyBroken() == true) && BrokenFix == false)
Cache.MarkInstall(Pkg,true);
+
return true;
}
/*}}}*/
@@ -1386,6 +1403,52 @@ bool DoUpdate(CommandLine &CmdL)
return true;
}
/*}}}*/
+// DoAutomaticRemove - Remove all automatic unused packages /*{{{*/
+// ---------------------------------------------------------------------
+/* Remove unused automatic packages */
+bool DoAutomaticRemove(CacheFile &Cache)
+{
+ if(_config->FindI("Debug::pkgAutoRemove",false))
+ std::cout << "DoAutomaticRemove()" << std::endl;
+
+ if (_config->FindB("APT::Get::Remove",true) == false)
+ return _error->Error(_("We are not supposed to delete stuff, can't "
+ "start AutoRemover"));
+
+ {
+ pkgDepCache::ActionGroup group(*Cache);
+
+ // look over the cache to see what can be removed
+ for (pkgCache::PkgIterator Pkg = Cache->PkgBegin(); ! Pkg.end(); ++Pkg)
+ {
+ if (Cache[Pkg].Garbage)
+ {
+ if(Pkg.CurrentVer() != 0 || Cache[Pkg].Install())
+ fprintf(stdout,"We could delete %s\n", Pkg.Name());
+
+ if(Pkg.CurrentVer() != 0 && Pkg->CurrentState != pkgCache::State::ConfigFiles)
+ Cache->MarkDelete(Pkg, _config->FindB("APT::Get::Purge", false));
+ else
+ Cache->MarkKeep(Pkg, false, false);
+ }
+ }
+ }
+
+ // Now see if we destroyed anything
+ if (Cache->BrokenCount() != 0)
+ {
+ c1out << _("Hmm, seems like the AutoRemover destroyed something which really\n"
+ "shouldn't happen. Please file a bug report against apt.") << endl;
+ c1out << endl;
+ c1out << _("The following information may help to resolve the situation:") << endl;
+ c1out << endl;
+ ShowBroken(c1out,Cache,false);
+
+ return _error->Error(_("Internal Error, AutoRemover broke stuff"));
+ }
+ return true;
+}
+
// DoUpgrade - Upgrade all packages /*{{{*/
// ---------------------------------------------------------------------
/* Upgrade all packages without installing new packages or erasing old
@@ -1428,6 +1491,11 @@ bool DoInstall(CommandLine &CmdL)
bool DefRemove = false;
if (strcasecmp(CmdL.FileList[0],"remove") == 0)
DefRemove = true;
+ else if (strcasecmp(CmdL.FileList[0], "autoremove") == 0)
+ {
+ _config->Set("APT::Get::AutomaticRemove", "true");
+ DefRemove = true;
+ }
for (const char **I = CmdL.FileList + 1; *I != 0; I++)
{
@@ -1577,6 +1645,11 @@ bool DoInstall(CommandLine &CmdL)
return _error->Error(_("Broken packages"));
}
+ if (_config->FindB("APT::Get::AutomaticRemove")) {
+ if (!DoAutomaticRemove(Cache))
+ return false;
+ }
+
/* Print out a list of packages that are going to be installed extra
to what the user asked */
if (Cache->InstCount() != ExpectedInst)
@@ -1596,8 +1669,8 @@ bool DoInstall(CommandLine &CmdL)
if (*J == 0) {
List += string(I.Name()) + " ";
- VersionsList += string(Cache[I].CandVersion) + "\n";
- }
+ VersionsList += string(Cache[I].CandVersion) + "\n";
+ }
}
ShowList(c1out,_("The following extra packages will be installed:"),List,VersionsList);
@@ -1732,6 +1805,8 @@ bool DoDSelectUpgrade(CommandLine &CmdL)
if (Cache.OpenForInstall() == false || Cache.CheckDeps() == false)
return false;
+ pkgDepCache::ActionGroup group(Cache);
+
// Install everything with the install flag set
pkgCache::PkgIterator I = Cache->PkgBegin();
for (;I.end() != true; I++)
@@ -2484,6 +2559,7 @@ void GetInitialize()
_config->Set("APT::Get::Fix-Broken",false);
_config->Set("APT::Get::Force-Yes",false);
_config->Set("APT::Get::List-Cleanup",true);
+ _config->Set("APT::Get::AutomaticRemove",false);
}
/*}}}*/
// SigWinch - Window size change signal handler /*{{{*/
@@ -2539,7 +2615,10 @@ int main(int argc,const char *argv[])
{0,"remove","APT::Get::Remove",0},
{0,"only-source","APT::Get::Only-Source",0},
{0,"arch-only","APT::Get::Arch-Only",0},
+ {0,"auto-remove","APT::Get::AutomaticRemove",0},
{0,"allow-unauthenticated","APT::Get::AllowUnauthenticated",0},
+ {0,"install-recommends","APT::Install-Recommends",CommandLine::Boolean},
+ {0,"fix-policy","APT::Get::Fix-Policy-Broken",0},
{'c',"config-file",0,CommandLine::ConfigFile},
{'o',"option",0,CommandLine::ArbItem},
{0,0,0,0}};
@@ -2547,6 +2626,7 @@ int main(int argc,const char *argv[])
{"upgrade",&DoUpgrade},
{"install",&DoInstall},
{"remove",&DoInstall},
+ {"autoremove",&DoInstall},
{"dist-upgrade",&DoDistUpgrade},
{"dselect-upgrade",&DoDSelectUpgrade},
{"build-dep",&DoBuildDep},
diff --git a/cmdline/apt-mark b/cmdline/apt-mark
new file mode 100755
index 000000000..533ed8715
--- /dev/null
+++ b/cmdline/apt-mark
@@ -0,0 +1,63 @@
+#!/usr/bin/python
+
+from optparse import OptionParser
+
+try:
+ import apt_pkg
+except ImportError:
+ print "Error importing apt_pkg, is python-apt installed?"
+
+import sys
+import os.path
+
+actions = { "markauto" : 1,
+ "unmarkauto": 0
+ }
+
+if __name__ == "__main__":
+ apt_pkg.init()
+
+ # option parsing
+ parser = OptionParser()
+ parser.usage = "%prog [options] {markauto|unmarkauto} packages..."
+ parser.add_option("-f", "--file", action="store", type="string",
+ dest="filename",
+ help="read/write a different file")
+ parser.add_option("-v", "--verbose",
+ action="store_true", dest="verbose", default=False,
+ help="print verbose status messages to stdout")
+ (options, args) = parser.parse_args()
+ if len(args) < 2:
+        parser.error("not enough arguments")
+
+ # get pkgs to change
+ if args[0] not in actions.keys():
+ parser.error("first argument must be 'markauto' or 'unmarkauto'")
+ pkgs = args[1:]
+ action = actions[args[0]]
+
+ # get the state-file
+ if not options.filename:
+ STATE_FILE = apt_pkg.Config.FindDir("Dir::State") + "extended_states"
+ else:
+        STATE_FILE = options.filename
+
+ # open the statefile
+ if os.path.exists(STATE_FILE):
+ tagfile = apt_pkg.ParseTagFile(open(STATE_FILE))
+ outfile = open(STATE_FILE+".tmp","w")
+ while tagfile.Step():
+ pkgname = tagfile.Section.get("Package")
+ autoInst = tagfile.Section.get("Auto-Installed")
+ if pkgname in pkgs:
+ if options.verbose:
+ print "changing %s to %s" % (pkgname,action)
+ newsec = apt_pkg.RewriteSection(tagfile.Section,
+ [],
+ [ ("Auto-Installed",str(action)) ]
+ )
+ outfile.write(newsec+"\n")
+ else:
+ outfile.write(str(tagfile.Section)+"\n")
+ # all done, rename the tmpfile
+ os.rename(outfile.name, STATE_FILE)
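
apt-mark only rewrites the Auto-Installed field of stanzas in the extended_states file (Dir::State/extended_states, i.e. /var/lib/apt/extended_states by default). The file is a plain Debian-control-style tag file as handled by apt_pkg.ParseTagFile, so a stanza looks roughly like the following; the package name is made up for illustration:

   Package: libexample1
   Auto-Installed: 1

Running "apt-mark unmarkauto libexample1" would rewrite that stanza with Auto-Installed: 0, which is exactly what the RewriteSection() call in the script does.
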
diff --git a/configure.in b/configure.in
index 35d0ea5ee..19828cc85 100644
--- a/configure.in
+++ b/configure.in
@@ -18,7 +18,7 @@ AC_CONFIG_AUX_DIR(buildlib)
AC_CONFIG_HEADER(include/config.h:buildlib/config.h.in include/apti18n.h:buildlib/apti18n.h.in)
dnl -- SET THIS TO THE RELEASE VERSION --
-AC_DEFINE_UNQUOTED(VERSION,"0.6.45.1")
+AC_DEFINE_UNQUOTED(VERSION,"0.6.46.1exp1")
PACKAGE="apt"
AC_DEFINE_UNQUOTED(PACKAGE,"$PACKAGE")
AC_SUBST(PACKAGE)
@@ -175,12 +175,21 @@ dnl Check for debiandoc
AC_PATH_PROG(DEBIANDOC_HTML,debiandoc2html)
AC_PATH_PROG(DEBIANDOC_TEXT,debiandoc2text)
+dnl Check for doxygen
+AC_PATH_PROG(DOXYGEN, doxygen)
+
dnl Check for the SGML tools needed to build man pages
AC_PATH_PROG(DOCBOOK2MAN,docbook2man)
dnl Check for the XML tools needed to build man pages
AC_PATH_PROG(XMLTO,xmlto)
+dnl Check for graphviz
+AC_CHECK_PROG([HAVE_DOT], [dot], [YES], [NO])
+AC_PATH_PROG([DOT], [dot], [])
+DOTDIR=$(dirname $DOT)
+AC_SUBST(DOTDIR)
+
dnl Check for YODL
dnl AC_CHECK_PROG(YODL_MAN,yodl2man,"yes","")
@@ -200,4 +209,4 @@ fi
AC_SUBST(USE_NLS)
AC_PATH_PROG(BASH, bash)
-AC_OUTPUT(environment.mak:buildlib/environment.mak.in makefile:buildlib/makefile.in,make -s dirs)
+AC_OUTPUT(environment.mak:buildlib/environment.mak.in makefile:buildlib/makefile.in doc/Doxyfile,make -s dirs)
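
The configure checks added above export HAVE_DOT and DOTDIR, and doc/Doxyfile is now generated via AC_OUTPUT from the new doc/Doxyfile.in, whose HAVE_DOT = @HAVE_DOT@ and DOT_PATH = @DOTDIR@ settings (further down in this patch) pick those values up. On a system where dot is found, the generated Doxyfile would therefore contain something like the lines below; the /usr/bin path is only illustrative:

   HAVE_DOT             = YES
   DOT_PATH             = /usr/bin
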
diff --git a/debian/changelog b/debian/changelog
index 88eb14871..521650c93 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,12 +1,19 @@
-apt (0.6.46.1) unstable; urgency=low
-
+apt (0.6.46.1exp1) experimental; urgency=low
+
+ * merged "install-recommends" branch (ABI break):
+ - new "--install-recommends"
+ - install new recommends on "upgrade" if --install-recommends is
+ given
+ - new "--fix-policy" option to install all packages with unmet
+       important dependencies (useful with --install-recommends to
+       see which recommended packages are not installed on the system)
+     - fix recommended packages display (only show CandidateVersion,
+       fix or-group handling)
* methods/gzip.cc:
- deal with empty files
* Applied patch from Daniel Schepler to make apt bin-NMU able.
- (closes: bug#359634)
* rebuild against current g++ because of:
http://gcc.gnu.org/bugzilla/show_bug.cgi?id=29289
- (closes: #390189)
* fix broken i18n in the dpkg progress reporting, thanks to
Frans Pop and Steinar Gunderson. (closes: #389261)
* Merged from Christian Perrier bzr branch:
@@ -57,7 +64,7 @@ apt (0.6.45) unstable; urgency=low
* apt-pkg/contrib/sha256.cc:
- fixed the sha256 generation (closes: #378183)
* ftparchive/cachedb.cc:
- - applied patch from Anthony Towns to fix Clean() function
+ - applied patch from ajt to fix Clean() function
(closes: #379576)
* doc/apt-get.8.xml:
- fix path to the apt user build (Closes: #375640)
@@ -75,9 +82,6 @@ apt (0.6.45) unstable; urgency=low
- fix for string mangling, closes: #373864
* apt-pkg/acquire-item.cc:
- check for bzip2 in /bin (closes: #377391)
- * apt-pkg/tagfile.cc:
- - make it work on non-mapable files again, thanks
- to James Troup for confirming the fix (closes: #376777)
* Merged from Christian Perrier bzr branch:
* ko.po: Updated to 512t. Closes: #378901
* hu.po: Updated to 512t. Closes: #376330
@@ -88,26 +92,42 @@ apt (0.6.45) unstable; urgency=low
* dz.po: New Dzongkha translation: 512t
* ro.po: Updated to 512t
* eu.po: Updated
+ * eu.po: Updated
+ * fix apt-get dist-upgrade
+ * fix warning if no /var/lib/apt/extended_states is present
+ * don't download Translations for deb-src sources.list lines
+ * apt-pkg/tagfile.cc:
+ - support not-mmapable files again
- -- Michael Vogt <mvo@debian.org> Thu, 27 Jul 2006 00:52:05 +0200
+ -- Michael Vogt <michael.vogt@ubuntu.com> Tue, 25 Jul 2006 11:55:22 +0200
-apt (0.6.44.2) unstable; urgency=low
-
- * apt-pkg/depcache.cc:
- - added Debug::pkgDepCache::AutoInstall (thanks to infinity)
- * apt-pkg/acquire-item.cc:
- - fix missing chmod() in the new aquire code
- (thanks to Bastian Blank, Closes: #367425)
- * merged from
- http://www.perrier.eu.org/debian/packages/d-i/level4/apt-main:
- * sk.po: Completed to 512t
- * eu.po: Completed to 512t
- * fr.po: Completed to 512t
- * sv.po: Completed to 512t
- * Update all PO and the POT. Gives 506t6f for formerly
- complete translations
-
- -- Michael Vogt <mvo@debian.org> Wed, 14 Jun 2006 12:00:57 +0200
+apt (0.6.44.2exp1) experimental; urgency=low
+
+ * added support for i18n of the package descriptions
+  * added support for aptitude-like auto-install tracking (a HUGE
+ HUGE thanks to Daniel Burrows who made this possible)
+ * synced with the http://people.debian.org/~mvo/bzr/apt/debian-sid branch
+ * build from http://people.debian.org/~mvo/bzr/apt/debian-experimental
+
+ -- Michael Vogt <mvo@debian.org> Mon, 3 Jul 2006 21:50:31 +0200
+
+apt (0.6.44.2) unstable; urgency=low
+
+ * apt-pkg/depcache.cc:
+ - added Debug::pkgDepCache::AutoInstall (thanks to infinity)
+ * apt-pkg/acquire-item.cc:
+ - fix missing chmod() in the new aquire code
+ (thanks to Bastian Blank, Closes: #367425)
+ * merged from
+ http://www.perrier.eu.org/debian/packages/d-i/level4/apt-main:
+ * sk.po: Completed to 512t
+ * eu.po: Completed to 512t
+ * fr.po: Completed to 512t
+ * sv.po: Completed to 512t
+ * Update all PO and the POT. Gives 506t6f for formerly
+ complete translations
+
+ -- Michael Vogt <mvo@debian.org> Wed, 14 Jun 2006 12:00:57 +0200
apt (0.6.44.1-0.1) unstable; urgency=low
@@ -339,7 +359,7 @@ apt (0.6.42) unstable; urgency=low
* cmdline/apt-cdrom.cc:
- fix some missing gettext() calls (closes: #334539)
* doc/apt-cache.8.xml: fix typo (closes: #334714)
-
+
-- Michael Vogt <mvo@debian.org> Wed, 19 Oct 2005 22:02:09 +0200
apt (0.6.41) unstable; urgency=low
@@ -439,6 +459,7 @@ apt (0.6.37) breezy; urgency=low
* Add Welsh translation from Dafydd Harries
(daf@muse.19inch.net--2005/apt--main--0--patch-1)
* Change debian/bugscript to use #!/bin/bash (Closes: #313402)
+  * Fix an incorrect example in the man-page (closes: #282918)
-- Matt Zimmerman <mdz@ubuntu.com> Tue, 24 May 2005 14:38:25 -0700
diff --git a/debian/rules b/debian/rules
index 02ee734bb..c9bd1f34a 100755
--- a/debian/rules
+++ b/debian/rules
@@ -338,4 +338,4 @@ arch-build:
mkdir -p debian/arch-build/apt-$(APT_DEBVER)
tar -c --exclude=arch-build --no-recursion -f - `bzr inventory` | (cd debian/arch-build/$(PKG)-$(APT_DEBVER);tar xf -)
$(MAKE) -C debian/arch-build/apt-$(APT_DEBVER) startup doc
- (cd debian/arch-build/apt-$(APT_DEBVER); $(DEB_BUILD_PROG))
+ (cd debian/arch-build/apt-$(APT_DEBVER); $(DEB_BUILD_PROG); dpkg-genchanges -S > ../apt_$(APT_DEBVER)_source.changes)
diff --git a/doc/Doxyfile.in b/doc/Doxyfile.in
new file mode 100644
index 000000000..f19ff93f6
--- /dev/null
+++ b/doc/Doxyfile.in
@@ -0,0 +1,1238 @@
+# Doxyfile 1.4.5
+
+# This file describes the settings to be used by the documentation system
+# doxygen (www.doxygen.org) for a project
+#
+# All text after a hash (#) is considered a comment and will be ignored
+# The format is:
+# TAG = value [value, ...]
+# For lists items can also be appended using:
+# TAG += value [value, ...]
+# Values that contain spaces should be placed between quotes (" ")
+
+#---------------------------------------------------------------------------
+# Project related configuration options
+#---------------------------------------------------------------------------
+
+# The PROJECT_NAME tag is a single word (or a sequence of words surrounded
+# by quotes) that should identify the project.
+
+PROJECT_NAME = @PACKAGE@
+
+# The PROJECT_NUMBER tag can be used to enter a project or revision number.
+# This could be handy for archiving the generated documentation or
+# if some version control system is used.
+
+PROJECT_NUMBER = @VERSION@
+
+# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute)
+# base path where the generated documentation will be put.
+# If a relative path is entered, it will be relative to the location
+# where doxygen was started. If left blank the current directory will be used.
+
+OUTPUT_DIRECTORY = ../build/doc/doxygen
+
+# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create
+# 4096 sub-directories (in 2 levels) under the output directory of each output
+# format and will distribute the generated files over these directories.
+# Enabling this option can be useful when feeding doxygen a huge amount of
+# source files, where putting all generated files in the same directory would
+# otherwise cause performance problems for the file system.
+
+CREATE_SUBDIRS = NO
+
+# The OUTPUT_LANGUAGE tag is used to specify the language in which all
+# documentation generated by doxygen is written. Doxygen will use this
+# information to generate all constant output in the proper language.
+# The default language is English, other supported languages are:
+# Brazilian, Catalan, Chinese, Chinese-Traditional, Croatian, Czech, Danish,
+# Dutch, Finnish, French, German, Greek, Hungarian, Italian, Japanese,
+# Japanese-en (Japanese with English messages), Korean, Korean-en, Norwegian,
+# Polish, Portuguese, Romanian, Russian, Serbian, Slovak, Slovene, Spanish,
+# Swedish, and Ukrainian.
+
+OUTPUT_LANGUAGE = English
+
+# This tag can be used to specify the encoding used in the generated output.
+# The encoding is not always determined by the language that is chosen,
+# but also whether or not the output is meant for Windows or non-Windows users.
+# In case there is a difference, setting the USE_WINDOWS_ENCODING tag to YES
+# forces the Windows encoding (this is the default for the Windows binary),
+# whereas setting the tag to NO uses a Unix-style encoding (the default for
+# all platforms other than Windows).
+
+USE_WINDOWS_ENCODING = NO
+
+# If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will
+# include brief member descriptions after the members that are listed in
+# the file and class documentation (similar to JavaDoc).
+# Set to NO to disable this.
+
+BRIEF_MEMBER_DESC = YES
+
+# If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend
+# the brief description of a member or function before the detailed description.
+# Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
+# brief descriptions will be completely suppressed.
+
+REPEAT_BRIEF = YES
+
+# This tag implements a quasi-intelligent brief description abbreviator
+# that is used to form the text in various listings. Each string
+# in this list, if found as the leading text of the brief description, will be
+# stripped from the text and the result after processing the whole list, is
+# used as the annotated text. Otherwise, the brief description is used as-is.
+# If left blank, the following values are used ("$name" is automatically
+# replaced with the name of the entity): "The $name class" "The $name widget"
+# "The $name file" "is" "provides" "specifies" "contains"
+# "represents" "a" "an" "the"
+
+ABBREVIATE_BRIEF =
+
+# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
+# Doxygen will generate a detailed section even if there is only a brief
+# description.
+
+ALWAYS_DETAILED_SEC = NO
+
+# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
+# inherited members of a class in the documentation of that class as if those
+# members were ordinary class members. Constructors, destructors and assignment
+# operators of the base classes will not be shown.
+
+INLINE_INHERITED_MEMB = NO
+
+# If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full
+# path before files name in the file list and in the header files. If set
+# to NO the shortest path that makes the file name unique will be used.
+
+FULL_PATH_NAMES = YES
+
+# If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag
+# can be used to strip a user-defined part of the path. Stripping is
+# only done if one of the specified strings matches the left-hand part of
+# the path. The tag can be used to show relative paths in the file list.
+# If left blank the directory from which doxygen is run is used as the
+# path to strip.
+
+STRIP_FROM_PATH =
+
+# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of
+# the path mentioned in the documentation of a class, which tells
+# the reader which header file to include in order to use a class.
+# If left blank only the name of the header file containing the class
+# definition is used. Otherwise one should specify the include paths that
+# are normally passed to the compiler using the -I flag.
+
+STRIP_FROM_INC_PATH =
+
+# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter
+# (but less readable) file names. This can be useful if your file system
+# doesn't support long names like on DOS, Mac, or CD-ROM.
+
+SHORT_NAMES = NO
+
+# If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen
+# will interpret the first line (until the first dot) of a JavaDoc-style
+# comment as the brief description. If set to NO, the JavaDoc
+# comments will behave just like the Qt-style comments (thus requiring an
+# explicit @brief command for a brief description).
+
+JAVADOC_AUTOBRIEF = NO
+
+# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen
+# treat a multi-line C++ special comment block (i.e. a block of //! or ///
+# comments) as a brief description. This used to be the default behaviour.
+# The new default is to treat a multi-line C++ comment block as a detailed
+# description. Set this tag to YES if you prefer the old behaviour instead.
+
+MULTILINE_CPP_IS_BRIEF = NO
+
+# If the DETAILS_AT_TOP tag is set to YES then Doxygen
+# will output the detailed description near the top, like JavaDoc.
+# If set to NO, the detailed description appears after the member
+# documentation.
+
+DETAILS_AT_TOP = NO
+
+# If the INHERIT_DOCS tag is set to YES (the default) then an undocumented
+# member inherits the documentation from any documented member that it
+# re-implements.
+
+INHERIT_DOCS = YES
+
+# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce
+# a new page for each member. If set to NO, the documentation of a member will
+# be part of the file/class/namespace that contains it.
+
+SEPARATE_MEMBER_PAGES = NO
+
+# The TAB_SIZE tag can be used to set the number of spaces in a tab.
+# Doxygen uses this value to replace tabs by spaces in code fragments.
+
+TAB_SIZE = 8
+
+# This tag can be used to specify a number of aliases that acts
+# as commands in the documentation. An alias has the form "name=value".
+# For example adding "sideeffect=\par Side Effects:\n" will allow you to
+# put the command \sideeffect (or @sideeffect) in the documentation, which
+# will result in a user-defined paragraph with heading "Side Effects:".
+# You can put \n's in the value part of an alias to insert newlines.
+
+ALIASES =
+
+# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C
+# sources only. Doxygen will then generate output that is more tailored for C.
+# For instance, some of the names that are used will be different. The list
+# of all members will be omitted, etc.
+
+OPTIMIZE_OUTPUT_FOR_C = NO
+
+# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java
+# sources only. Doxygen will then generate output that is more tailored for Java.
+# For instance, namespaces will be presented as packages, qualified scopes
+# will look different, etc.
+
+OPTIMIZE_OUTPUT_JAVA = NO
+
+# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want to
+# include (a tag file for) the STL sources as input, then you should
+# set this tag to YES in order to let doxygen match functions declarations and
+# definitions whose arguments contain STL classes (e.g. func(std::string); vs.
+# func(std::string) {}). This also makes the inheritance and collaboration
+# diagrams that involve STL classes more complete and accurate.
+
+BUILTIN_STL_SUPPORT = YES
+
+# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
+# tag is set to YES, then doxygen will reuse the documentation of the first
+# member in the group (if any) for the other members of the group. By default
+# all members of a group must be documented explicitly.
+
+DISTRIBUTE_GROUP_DOC = NO
+
+# Set the SUBGROUPING tag to YES (the default) to allow class member groups of
+# the same type (for instance a group of public functions) to be put as a
+# subgroup of that type (e.g. under the Public Functions section). Set it to
+# NO to prevent subgrouping. Alternatively, this can be done per class using
+# the \nosubgrouping command.
+
+SUBGROUPING = YES
+
+#---------------------------------------------------------------------------
+# Build related configuration options
+#---------------------------------------------------------------------------
+
+# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in
+# documentation are documented, even if no documentation was available.
+# Private class members and static file members will be hidden unless
+# the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES
+
+EXTRACT_ALL = NO
+
+# If the EXTRACT_PRIVATE tag is set to YES all private members of a class
+# will be included in the documentation.
+
+EXTRACT_PRIVATE = NO
+
+# If the EXTRACT_STATIC tag is set to YES all static members of a file
+# will be included in the documentation.
+
+EXTRACT_STATIC = NO
+
+# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs)
+# defined locally in source files will be included in the documentation.
+# If set to NO only classes defined in header files are included.
+
+EXTRACT_LOCAL_CLASSES = YES
+
+# This flag is only useful for Objective-C code. When set to YES local
+# methods, which are defined in the implementation section but not in
+# the interface are included in the documentation.
+# If set to NO (the default) only methods in the interface are included.
+
+EXTRACT_LOCAL_METHODS = NO
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all
+# undocumented members of documented classes, files or namespaces.
+# If set to NO (the default) these members will be included in the
+# various overviews, but no documentation section is generated.
+# This option has no effect if EXTRACT_ALL is enabled.
+
+HIDE_UNDOC_MEMBERS = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy.
+# If set to NO (the default) these classes will be included in the various
+# overviews. This option has no effect if EXTRACT_ALL is enabled.
+
+HIDE_UNDOC_CLASSES = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all
+# friend (class|struct|union) declarations.
+# If set to NO (the default) these declarations will be included in the
+# documentation.
+
+HIDE_FRIEND_COMPOUNDS = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any
+# documentation blocks found inside the body of a function.
+# If set to NO (the default) these blocks will be appended to the
+# function's detailed documentation block.
+
+HIDE_IN_BODY_DOCS = NO
+
+# The INTERNAL_DOCS tag determines if documentation
+# that is typed after a \internal command is included. If the tag is set
+# to NO (the default) then the documentation will be excluded.
+# Set it to YES to include the internal documentation.
+
+INTERNAL_DOCS = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate
+# file names in lower-case letters. If set to YES upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+
+CASE_SENSE_NAMES = YES
+
+# If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen
+# will show members with their full class and namespace scopes in the
+# documentation. If set to YES the scope will be hidden.
+
+HIDE_SCOPE_NAMES = YES
+
+# If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen
+# will put a list of the files that are included by a file in the documentation
+# of that file.
+
+SHOW_INCLUDE_FILES = YES
+
+# If the INLINE_INFO tag is set to YES (the default) then a tag [inline]
+# is inserted in the documentation for inline members.
+
+INLINE_INFO = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen
+# will sort the (detailed) documentation of file and class members
+# alphabetically by member name. If set to NO the members will appear in
+# declaration order.
+
+SORT_MEMBER_DOCS = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the
+# brief documentation of file, namespace and class members alphabetically
+# by member name. If set to NO (the default) the members will appear in
+# declaration order.
+
+SORT_BRIEF_DOCS = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be
+# sorted by fully-qualified names, including namespaces. If set to
+# NO (the default), the class list will be sorted only by class name,
+# not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the
+# alphabetical list.
+
+SORT_BY_SCOPE_NAME = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or
+# disable (NO) the todo list. This list is created by putting \todo
+# commands in the documentation.
+
+GENERATE_TODOLIST = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or
+# disable (NO) the test list. This list is created by putting \test
+# commands in the documentation.
+
+GENERATE_TESTLIST = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or
+# disable (NO) the bug list. This list is created by putting \bug
+# commands in the documentation.
+
+GENERATE_BUGLIST = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or
+# disable (NO) the deprecated list. This list is created by putting
+# \deprecated commands in the documentation.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional
+# documentation sections, marked by \if sectionname ... \endif.
+
+ENABLED_SECTIONS =
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines
+# the initial value of a variable or define consists of for it to appear in
+# the documentation. If the initializer consists of more lines than specified
+# here it will be hidden. Use a value of 0 to hide initializers completely.
+# The appearance of the initializer of individual variables and defines in the
+# documentation can be controlled using \showinitializer or \hideinitializer
+# command in the documentation regardless of this setting.
+
+MAX_INITIALIZER_LINES = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated
+# at the bottom of the documentation of classes and structs. If set to YES the
+# list will mention the files that were used to generate the documentation.
+
+SHOW_USED_FILES = YES
+
+# If the sources in your project are distributed over multiple directories
+# then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy
+# in the documentation. The default is YES.
+
+SHOW_DIRECTORIES = YES
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from the
+# version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output
+# is used as the file version. See the manual for examples.
+
+FILE_VERSION_FILTER =
+
+#---------------------------------------------------------------------------
+# configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated
+# by doxygen. Possible values are YES and NO. If left blank NO is used.
+
+QUIET = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated by doxygen. Possible values are YES and NO. If left blank
+# NO is used.
+
+WARNINGS = YES
+
+# If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings
+# for undocumented members. If EXTRACT_ALL is set to YES then this flag will
+# automatically be disabled.
+
+WARN_IF_UNDOCUMENTED = YES
+
+# If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some
+# parameters in a documented function, or documenting parameters that
+# don't exist or using markup commands wrongly.
+
+WARN_IF_DOC_ERROR = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for
+# functions that are documented, but have no documentation for their parameters
+# or return value. If set to NO (the default) doxygen will only warn about
+# wrong or incomplete parameter documentation, but not about the absence of
+# documentation.
+
+WARN_NO_PARAMDOC = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that
+# doxygen can produce. The string should contain the $file, $line, and $text
+# tags, which will be replaced by the file and line number from which the
+# warning originated and the warning text. Optionally the format may contain
+# $version, which will be replaced by the version of the file (if it could
+# be obtained via FILE_VERSION_FILTER)
+
+WARN_FORMAT = "$file:$line: $text"
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning
+# and error messages should be written. If left blank the output is written
+# to stderr.
+
+WARN_LOGFILE =
+
+#---------------------------------------------------------------------------
+# configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag can be used to specify the files and/or directories that contain
+# documented source files. You may enter file names like "myfile.cpp" or
+# directories like "/usr/src/myproject". Separate the files or directories
+# with spaces.
+
+INPUT = ../apt-pkg
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank the following patterns are tested:
+# *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx
+# *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py
+
+FILE_PATTERNS = *.cc \
+ *.h
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories
+# should be searched for input files as well. Possible values are YES and NO.
+# If left blank NO is used.
+
+RECURSIVE = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should
+# be excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+
+EXCLUDE =
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix filesystem feature) are excluded
+# from the input.
+
+EXCLUDE_SYMLINKS = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories. Note that the wildcards are matched
+# against the file with absolute path, so to exclude all test directories
+# for example use the pattern */test/*
+
+EXCLUDE_PATTERNS =
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or
+# directories that contain example code fragments that are included (see
+# the \include command).
+
+EXAMPLE_PATH =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp
+# and *.h) to filter out the source-files in the directories. If left
+# blank all files are included.
+
+EXAMPLE_PATTERNS =
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude
+# commands irrespective of the value of the RECURSIVE tag.
+# Possible values are YES and NO. If left blank NO is used.
+
+EXAMPLE_RECURSIVE = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or
+# directories that contain images that are included in the documentation (see
+# the \image command).
+
+IMAGE_PATH =
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command <filter> <input-file>, where <filter>
+# is the value of the INPUT_FILTER tag, and <input-file> is the name of an
+# input file. Doxygen will then use the output that the filter program writes
+# to standard output. If FILTER_PATTERNS is specified, this tag will be
+# ignored.
+
+INPUT_FILTER =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form:
+# pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further
+# info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER
+# is applied to all files.
+
+FILTER_PATTERNS =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will be used to filter the input files when producing source
+# files to browse (i.e. when SOURCE_BROWSER is set to YES).
+
+FILTER_SOURCE_FILES = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will
+# be generated. Documented entities will be cross-referenced with these sources.
+# Note: To get rid of all source code in the generated output, make sure also
+# VERBATIM_HEADERS is set to NO.
+
+SOURCE_BROWSER = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body
+# of functions and classes directly in the documentation.
+
+INLINE_SOURCES = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct
+# doxygen to hide any special comment blocks from generated source code
+# fragments. Normal C and C++ comments will always remain visible.
+
+STRIP_CODE_COMMENTS = YES
+
+# If the REFERENCED_BY_RELATION tag is set to YES (the default)
+# then for each documented function all documented
+# functions referencing it will be listed.
+
+REFERENCED_BY_RELATION = YES
+
+# If the REFERENCES_RELATION tag is set to YES (the default)
+# then for each documented function all documented entities
+# called/used by that function will be listed.
+
+REFERENCES_RELATION = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code
+# will point to the HTML generated by the htags(1) tool instead of doxygen
+# built-in source browser. The htags tool is part of GNU's global source
+# tagging system (see http://www.gnu.org/software/global/global.html). You
+# will need version 4.8.6 or higher.
+
+USE_HTAGS = NO
+
+# If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen
+# will generate a verbatim copy of the header file for each class for
+# which an include is specified. Set to NO to disable this.
+
+VERBATIM_HEADERS = YES
+
+#---------------------------------------------------------------------------
+# configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index
+# of all compounds will be generated. Enable this if the project
+# contains a lot of classes, structs, unions or interfaces.
+
+ALPHABETICAL_INDEX = NO
+
+# If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then
+# the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns
+# in which this list will be split (can be a number in the range [1..20])
+
+COLS_IN_ALPHA_INDEX = 5
+
+# In case all classes in a project start with a common prefix, all
+# classes will be put under the same header in the alphabetical index.
+# The IGNORE_PREFIX tag can be used to specify one or more prefixes that
+# should be ignored while generating the index headers.
+
+IGNORE_PREFIX =
+
+#---------------------------------------------------------------------------
+# configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES (the default) Doxygen will
+# generate HTML output.
+
+GENERATE_HTML = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `html' will be used as the default path.
+
+HTML_OUTPUT = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for
+# each generated HTML page (for example: .htm,.php,.asp). If it is left blank
+# doxygen will generate files with .html extension.
+
+HTML_FILE_EXTENSION = .html
+
+# The HTML_HEADER tag can be used to specify a personal HTML header for
+# each generated HTML page. If it is left blank doxygen will generate a
+# standard header.
+
+HTML_HEADER =
+
+# The HTML_FOOTER tag can be used to specify a personal HTML footer for
+# each generated HTML page. If it is left blank doxygen will generate a
+# standard footer.
+
+HTML_FOOTER =
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading
+# style sheet that is used by each HTML page. It can be used to
+# fine-tune the look of the HTML output. If the tag is left blank doxygen
+# will generate a default style sheet. Note that doxygen will try to copy
+# the style sheet file to the HTML output directory, so don't put your own
+# stylesheet in the HTML output directory as well, or it will be erased!
+
+HTML_STYLESHEET =
+
+# If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes,
+# files or namespaces will be aligned in HTML using tables. If set to
+# NO a bullet list will be used.
+
+HTML_ALIGN_MEMBERS = YES
+
+# If the GENERATE_HTMLHELP tag is set to YES, additional index files
+# will be generated that can be used as input for tools like the
+# Microsoft HTML help workshop to generate a compressed HTML help file (.chm)
+# of the generated HTML documentation.
+
+GENERATE_HTMLHELP = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can
+# be used to specify the file name of the resulting .chm file. You
+# can add a path in front of the file if the result should not be
+# written to the html output directory.
+
+CHM_FILE =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can
+# be used to specify the location (absolute path including file name) of
+# the HTML help compiler (hhc.exe). If non-empty doxygen will try to run
+# the HTML help compiler on the generated index.hhp.
+
+HHC_LOCATION =
+
+# If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag
+# controls if a separate .chi index file is generated (YES) or that
+# it should be included in the master .chm file (NO).
+
+GENERATE_CHI = NO
+
+# If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag
+# controls whether a binary table of contents is generated (YES) or a
+# normal table of contents (NO) in the .chm file.
+
+BINARY_TOC = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members
+# to the contents of the HTML help documentation and to the tree view.
+
+TOC_EXPAND = NO
+
+# The DISABLE_INDEX tag can be used to turn on/off the condensed index at
+# top of each HTML page. The value NO (the default) enables the index and
+# the value YES disables it.
+
+DISABLE_INDEX = NO
+
+# This tag can be used to set the number of enum values (range [1..20])
+# that doxygen will group on one line in the generated HTML documentation.
+
+ENUM_VALUES_PER_LINE = 4
+
+# If the GENERATE_TREEVIEW tag is set to YES, a side panel will be
+# generated containing a tree-like index structure (just like the one that
+# is generated for HTML Help). For this to work a browser that supports
+# JavaScript, DHTML, CSS and frames is required (for instance Mozilla 1.0+,
+# Netscape 6.0+, Internet explorer 5.0+, or Konqueror). Windows users are
+# probably better off using the HTML help feature.
+
+GENERATE_TREEVIEW = NO
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be
+# used to set the initial width (in pixels) of the frame in which the tree
+# is shown.
+
+TREEVIEW_WIDTH = 250
+
+#---------------------------------------------------------------------------
+# configuration options related to the LaTeX output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_LATEX tag is set to YES (the default) Doxygen will
+# generate Latex output.
+
+GENERATE_LATEX = YES
+
+# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `latex' will be used as the default path.
+
+LATEX_OUTPUT = latex
+
+# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
+# invoked. If left blank `latex' will be used as the default command name.
+
+LATEX_CMD_NAME = latex
+
+# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to
+# generate index for LaTeX. If left blank `makeindex' will be used as the
+# default command name.
+
+MAKEINDEX_CMD_NAME = makeindex
+
+# If the COMPACT_LATEX tag is set to YES Doxygen generates more compact
+# LaTeX documents. This may be useful for small projects and may help to
+# save some trees in general.
+
+COMPACT_LATEX = NO
+
+# The PAPER_TYPE tag can be used to set the paper type that is used
+# by the printer. Possible values are: a4, a4wide, letter, legal and
+# executive. If left blank a4wide will be used.
+
+PAPER_TYPE = a4wide
+
+# The EXTRA_PACKAGES tag can be used to specify one or more names of LaTeX
+# packages that should be included in the LaTeX output.
+
+EXTRA_PACKAGES =
+
+# The LATEX_HEADER tag can be used to specify a personal LaTeX header for
+# the generated latex document. The header should contain everything until
+# the first chapter. If it is left blank doxygen will generate a
+# standard header. Notice: only use this tag if you know what you are doing!
+
+LATEX_HEADER =
+
+# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated
+# is prepared for conversion to pdf (using ps2pdf). The pdf file will
+# contain links (just like the HTML output) instead of page references
+# This makes the output suitable for online browsing using a pdf viewer.
+
+PDF_HYPERLINKS = NO
+
+# If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of
+# plain latex in the generated Makefile. Set this option to YES to get a
+# higher quality PDF documentation.
+
+USE_PDFLATEX = NO
+
+# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode.
+# command to the generated LaTeX files. This will instruct LaTeX to keep
+# running if errors occur, instead of asking the user for help.
+# This option is also used when generating formulas in HTML.
+
+LATEX_BATCHMODE = NO
+
+# If LATEX_HIDE_INDICES is set to YES then doxygen will not
+# include the index chapters (such as File Index, Compound Index, etc.)
+# in the output.
+
+LATEX_HIDE_INDICES = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the RTF output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output
+# The RTF output is optimized for Word 97 and may not look very pretty with
+# other RTF readers or editors.
+
+GENERATE_RTF = NO
+
+# The RTF_OUTPUT tag is used to specify where the RTF docs will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `rtf' will be used as the default path.
+
+RTF_OUTPUT = rtf
+
+# If the COMPACT_RTF tag is set to YES Doxygen generates more compact
+# RTF documents. This may be useful for small projects and may help to
+# save some trees in general.
+
+COMPACT_RTF = NO
+
+# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated
+# will contain hyperlink fields. The RTF file will
+# contain links (just like the HTML output) instead of page references.
+# This makes the output suitable for online browsing using WORD or other
+# programs which support those fields.
+# Note: wordpad (write) and others do not support links.
+
+RTF_HYPERLINKS = NO
+
+# Load stylesheet definitions from file. Syntax is similar to doxygen's
+# config file, i.e. a series of assignments. You only have to provide
+# replacements, missing definitions are set to their default value.
+
+RTF_STYLESHEET_FILE =
+
+# Set optional variables used in the generation of an rtf document.
+# Syntax is similar to doxygen's config file.
+
+RTF_EXTENSIONS_FILE =
+
+#---------------------------------------------------------------------------
+# configuration options related to the man page output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_MAN tag is set to YES (the default) Doxygen will
+# generate man pages
+
+GENERATE_MAN = NO
+
+# The MAN_OUTPUT tag is used to specify where the man pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `man' will be used as the default path.
+
+MAN_OUTPUT = man
+
+# The MAN_EXTENSION tag determines the extension that is added to
+# the generated man pages (default is the subroutine's section .3)
+
+MAN_EXTENSION = .3
+
+# If the MAN_LINKS tag is set to YES and Doxygen generates man output,
+# then it will generate one additional man file for each entity
+# documented in the real man page(s). These additional files
+# only source the real man page, but without them the man command
+# would be unable to find the correct page. The default is NO.
+
+MAN_LINKS = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the XML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_XML tag is set to YES Doxygen will
+# generate an XML file that captures the structure of
+# the code including all documentation.
+
+GENERATE_XML = NO
+
+# The XML_OUTPUT tag is used to specify where the XML pages will be put.
+# If a relative path is entered the value of OUTPUT_DIRECTORY will be
+# put in front of it. If left blank `xml' will be used as the default path.
+
+XML_OUTPUT = xml
+
+# The XML_SCHEMA tag can be used to specify an XML schema,
+# which can be used by a validating XML parser to check the
+# syntax of the XML files.
+
+XML_SCHEMA =
+
+# The XML_DTD tag can be used to specify an XML DTD,
+# which can be used by a validating XML parser to check the
+# syntax of the XML files.
+
+XML_DTD =
+
+# If the XML_PROGRAMLISTING tag is set to YES Doxygen will
+# dump the program listings (including syntax highlighting
+# and cross-referencing information) to the XML output. Note that
+# enabling this will significantly increase the size of the XML output.
+
+XML_PROGRAMLISTING = YES
+
+#---------------------------------------------------------------------------
+# configuration options for the AutoGen Definitions output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will
+# generate an AutoGen Definitions (see autogen.sf.net) file
+# that captures the structure of the code including all
+# documentation. Note that this feature is still experimental
+# and incomplete at the moment.
+
+GENERATE_AUTOGEN_DEF = NO
+
+#---------------------------------------------------------------------------
+# configuration options related to the Perl module output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_PERLMOD tag is set to YES Doxygen will
+# generate a Perl module file that captures the structure of
+# the code including all documentation. Note that this
+# feature is still experimental and incomplete at the
+# moment.
+
+GENERATE_PERLMOD = NO
+
+# If the PERLMOD_LATEX tag is set to YES Doxygen will generate
+# the necessary Makefile rules, Perl scripts and LaTeX code to be able
+# to generate PDF and DVI output from the Perl module output.
+
+PERLMOD_LATEX = NO
+
+# If the PERLMOD_PRETTY tag is set to YES the Perl module output will be
+# nicely formatted so it can be parsed by a human reader. This is useful
+# if you want to understand what is going on. On the other hand, if this
+# tag is set to NO the size of the Perl module output will be much smaller
+# and Perl will parse it just the same.
+
+PERLMOD_PRETTY = YES
+
+# The names of the make variables in the generated doxyrules.make file
+# are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX.
+# This is useful so different doxyrules.make files included by the same
+# Makefile don't overwrite each other's variables.
+
+PERLMOD_MAKEVAR_PREFIX =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the preprocessor
+#---------------------------------------------------------------------------
+
+# If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will
+# evaluate all C-preprocessor directives found in the sources and include
+# files.
+
+ENABLE_PREPROCESSING = YES
+
+# If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro
+# names in the source code. If set to NO (the default) only conditional
+# compilation will be performed. Macro expansion can be done in a controlled
+# way by setting EXPAND_ONLY_PREDEF to YES.
+
+MACRO_EXPANSION = NO
+
+# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES
+# then the macro expansion is limited to the macros specified with the
+# PREDEFINED and EXPAND_AS_DEFINED tags.
+
+EXPAND_ONLY_PREDEF = NO
+
+# If the SEARCH_INCLUDES tag is set to YES (the default) the includes files
+# in the INCLUDE_PATH (see below) will be searched if a #include is found.
+
+SEARCH_INCLUDES = YES
+
+# The INCLUDE_PATH tag can be used to specify one or more directories that
+# contain include files that are not input files but should be processed by
+# the preprocessor.
+
+INCLUDE_PATH =
+
+# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
+# patterns (like *.h and *.hpp) to filter out the header-files in the
+# directories. If left blank, the patterns specified with FILE_PATTERNS will
+# be used.
+
+INCLUDE_FILE_PATTERNS =
+
+# The PREDEFINED tag can be used to specify one or more macro names that
+# are defined before the preprocessor is started (similar to the -D option of
+# gcc). The argument of the tag is a list of macros of the form: name
+# or name=definition (no spaces). If the definition and the = are
+# omitted =1 is assumed. To prevent a macro definition from being
+# undefined via #undef or recursively expanded use the := operator
+# instead of the = operator.
+
+PREDEFINED =
+
+# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then
+# this tag can be used to specify a list of macro names that should be expanded.
+# The macro definition that is found in the sources will be used.
+# Use the PREDEFINED tag if you want to use a different macro definition.
+
+EXPAND_AS_DEFINED =
+
+# If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then
+# doxygen's preprocessor will remove all function-like macros that are alone
+# on a line, have an all uppercase name, and do not end with a semicolon. Such
+# function macros are typically used for boiler-plate code, and will confuse
+# the parser if not removed.
+
+SKIP_FUNCTION_MACROS = YES
+
+#---------------------------------------------------------------------------
+# Configuration::additions related to external references
+#---------------------------------------------------------------------------
+
+# The TAGFILES option can be used to specify one or more tagfiles.
+# Optionally an initial location of the external documentation
+# can be added for each tagfile. The format of a tag file without
+# this location is as follows:
+# TAGFILES = file1 file2 ...
+# Adding location for the tag files is done as follows:
+# TAGFILES = file1=loc1 "file2 = loc2" ...
+# where "loc1" and "loc2" can be relative or absolute paths or
+# URLs. If a location is present for each tag, the installdox tool
+# does not have to be run to correct the links.
+# Note that each tag file must have a unique name
+# (where the name does NOT include the path)
+# If a tag file is not located in the directory in which doxygen
+# is run, you must also specify the path to the tagfile here.
+
+TAGFILES =
+
+# When a file name is specified after GENERATE_TAGFILE, doxygen will create
+# a tag file that is based on the input files it reads.
+
+GENERATE_TAGFILE =
+
+# If the ALLEXTERNALS tag is set to YES all external classes will be listed
+# in the class index. If set to NO only the inherited external classes
+# will be listed.
+
+ALLEXTERNALS = NO
+
+# If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed
+# in the modules index. If set to NO, only the current project's groups will
+# be listed.
+
+EXTERNAL_GROUPS = YES
+
+# The PERL_PATH should be the absolute path and name of the perl script
+# interpreter (i.e. the result of `which perl').
+
+PERL_PATH = /usr/bin/perl
+
+#---------------------------------------------------------------------------
+# Configuration options related to the dot tool
+#---------------------------------------------------------------------------
+
+# If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will
+# generate an inheritance diagram (in HTML, RTF and LaTeX) for classes with base
+# or super classes. Setting the tag to NO turns the diagrams off. Note that
+# this option is superseded by the HAVE_DOT option below. This is only a
+# fallback. It is recommended to install and use dot, since it yields more
+# powerful graphs.
+
+CLASS_DIAGRAMS = YES
+
+# If set to YES, the inheritance and collaboration graphs will hide
+# inheritance and usage relations if the target is undocumented
+# or is not a class.
+
+HIDE_UNDOC_RELATIONS = YES
+
+# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
+# available from the path. This tool is part of Graphviz, a graph visualization
+# toolkit from AT&T and Lucent Bell Labs. The other options in this section
+# have no effect if this option is set to NO (the default)
+
+HAVE_DOT = @HAVE_DOT@
+
+# If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect inheritance relations. Setting this tag to YES will force the
+# CLASS_DIAGRAMS tag to NO.
+
+CLASS_GRAPH = YES
+
+# If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for each documented class showing the direct and
+# indirect implementation dependencies (inheritance, containment, and
+# class reference variables) of the class with other documented classes.
+
+COLLABORATION_GRAPH = YES
+
+# If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen
+# will generate a graph for groups, showing the direct group dependencies.
+
+GROUP_GRAPHS = YES
+
+# If the UML_LOOK tag is set to YES doxygen will generate inheritance and
+# collaboration diagrams in a style similar to the OMG's Unified Modeling
+# Language.
+
+UML_LOOK = NO
+
+# If set to YES, the inheritance and collaboration graphs will show the
+# relations between templates and their instances.
+
+TEMPLATE_RELATIONS = NO
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT
+# tags are set to YES then doxygen will generate a graph for each documented
+# file showing the direct and indirect include dependencies of the file with
+# other documented files.
+
+INCLUDE_GRAPH = YES
+
+# If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and
+# HAVE_DOT tags are set to YES then doxygen will generate a graph for each
+# documented header file showing the documented files that directly or
+# indirectly include this file.
+
+INCLUDED_BY_GRAPH = YES
+
+# If the CALL_GRAPH and HAVE_DOT tags are set to YES then doxygen will
+# generate a call dependency graph for every global function or class method.
+# Note that enabling this option will significantly increase the time of a run.
+# So in most cases it will be better to enable call graphs for selected
+# functions only using the \callgraph command.
+
+CALL_GRAPH = NO
+
+# If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen
+# will show a graphical hierarchy of all classes instead of a textual one.
+
+GRAPHICAL_HIERARCHY = YES
+
+# If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES
+# then doxygen will show the dependencies a directory has on other directories
+# in a graphical way. The dependency relations are determined by the #include
+# relations between the files in the directories.
+
+DIRECTORY_GRAPH = YES
+
+# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
+# generated by dot. Possible values are png, jpg, or gif.
+# If left blank png will be used.
+
+DOT_IMAGE_FORMAT = png
+
+# The tag DOT_PATH can be used to specify the path where the dot tool can be
+# found. If left blank, it is assumed the dot tool can be found in the path.
+
+DOT_PATH = @DOTDIR@
+
+# The DOTFILE_DIRS tag can be used to specify one or more directories that
+# contain dot files that are included in the documentation (see the
+# \dotfile command).
+
+DOTFILE_DIRS =
+
+# The MAX_DOT_GRAPH_WIDTH tag can be used to set the maximum allowed width
+# (in pixels) of the graphs generated by dot. If a graph becomes larger than
+# this value, doxygen will try to truncate the graph, so that it fits within
+# the specified constraint. Beware that most browsers cannot cope with very
+# large images.
+
+MAX_DOT_GRAPH_WIDTH = 1024
+
+# The MAX_DOT_GRAPH_HEIGHT tag can be used to set the maximum allowed height
+# (in pixels) of the graphs generated by dot. If a graph becomes larger than
+# this value, doxygen will try to truncate the graph, so that it fits within
+# the specified constraint. Beware that most browsers cannot cope with very
+# large images.
+
+MAX_DOT_GRAPH_HEIGHT = 1024
+
+# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the
+# graphs generated by dot. A depth value of 3 means that only nodes reachable
+# from the root by following a path via at most 3 edges will be shown. Nodes
+# that lie further from the root node will be omitted. Note that setting this
+# option to 1 or 2 may greatly reduce the computation time needed for large
+# code bases. Also note that a graph may be further truncated if the graph's
+# image dimensions are not sufficient to fit the graph (see MAX_DOT_GRAPH_WIDTH
+# and MAX_DOT_GRAPH_HEIGHT). If 0 is used for the depth value (the default),
+# the graph is not depth-constrained.
+
+MAX_DOT_GRAPH_DEPTH = 0
+
+# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
+# background. This is disabled by default, which results in a white background.
+# Warning: Depending on the platform used, enabling this option may lead to
+# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
+# read).
+
+DOT_TRANSPARENT = NO
+
+# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
+# files in one run (i.e. multiple -o and -T options on the command line). This
+# makes dot run faster, but since only newer versions of dot (>1.8.10)
+# support this, this feature is disabled by default.
+
+DOT_MULTI_TARGETS = NO
+
+# If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will
+# generate a legend page explaining the meaning of the various boxes and
+# arrows in the dot generated graphs.
+
+GENERATE_LEGEND = YES
+
+# If the DOT_CLEANUP tag is set to YES (the default) Doxygen will
+# remove the intermediate dot files that are used to generate
+# the various graphs.
+
+DOT_CLEANUP = YES
+
+#---------------------------------------------------------------------------
+# Configuration::additions related to the search engine
+#---------------------------------------------------------------------------
+
+# The SEARCHENGINE tag specifies whether or not a search engine should be
+# used. If set to NO the values of all tags below this one will be ignored.
+
+SEARCHENGINE = NO
diff --git a/doc/apt_preferences.5.xml b/doc/apt_preferences.5.xml
index 3e50bef8c..12b03196a 100644
--- a/doc/apt_preferences.5.xml
+++ b/doc/apt_preferences.5.xml
@@ -183,7 +183,7 @@ belonging to any distribution whose Archive name is "<literal>unstable</literal>
<programlisting>
Package: *
Pin: release a=unstable
-Pin-Priority: 50
+Pin-Priority: 500
</programlisting>
<simpara>The following record assigns a high priority to all package versions
diff --git a/doc/examples/configure-index b/doc/examples/configure-index
index ac2f2997d..831e276c7 100644
--- a/doc/examples/configure-index
+++ b/doc/examples/configure-index
@@ -24,11 +24,15 @@ APT
{
Architecture "i386";
Build-Essential "build-essential";
-
+
+  NeverAutoRemove { "linux-kernel.*"; }; // packages that should never be
+                                         // considered for autoRemove
+
// Options for apt-get
Get
{
Arch-Only "false";
+ AutomaticRemove "false";
Download-Only "false";
Simulate "false";
Assume-Yes "false";
@@ -90,6 +94,10 @@ APT
Cache-Limit "4194304";
Default-Release "";
+ // consider Recommends, Suggests as important dependencies that should
+ // be installed by default
+  Install-Recommends "false";
+  Install-Suggests "false";
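+  // Example of a one-off override on the command line (illustrative only;
+  // the full option name is APT::Install-Recommends):
+  //   apt-get -o APT::Install-Recommends=true install <pkg>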
// Write progress messages on this fd (for stuff like base-config)
Status-Fd "-1";
@@ -253,8 +261,8 @@ Debug
pkgDPkgPM "false";
pkgDPkgProgressReporting "false";
pkgOrderList "false";
+ pkgAutoRemove "false"; // show information about automatic removes
BuildDeps "false";
-
pkgInitialize "false"; // This one will dump the configuration space
NoLocking "false";
Acquire::Ftp "false"; // Show ftp command traffic
diff --git a/doc/makefile b/doc/makefile
index 31ee061fb..5f774b825 100644
--- a/doc/makefile
+++ b/doc/makefile
@@ -42,3 +42,24 @@ doc.ja: %.ja:
doc.pl: %.pl:
$(MAKE) -C pl $*
+
+ifdef DOXYGEN
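+# Every .cc and .h file under apt-pkg (editor backup files excluded) is an
+# input for doxygen; changing any of them invalidates doxygen-stamp below.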
+DOXYGEN_SOURCES = $(shell find $(BASE)/apt-pkg -not -name .\\\#* -and \( -name \*.cc -or -name \*.h \) )
+
+clean: doxygen-clean
+
+doxygen-clean:
+ rm -fr $(BUILD)/doc/doxygen
+ rm -f $(BUILD)/doc/doxygen-stamp
+
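+# Regenerate build/doc/Doxyfile via config.status whenever the template changes.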
+$(BUILD)/doc/Doxyfile: Doxyfile.in
+ (cd $(BUILD) && ./config.status doc/Doxyfile)
+
+$(BUILD)/doc/doxygen-stamp: $(DOXYGEN_SOURCES) $(BUILD)/doc/Doxyfile
+ rm -fr $(BUILD)/doc/doxygen
+ $(DOXYGEN) $(BUILD)/doc/Doxyfile
+ touch $(BUILD)/doc/doxygen-stamp
+
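+# Hook doxygen into the regular documentation build: when DOXYGEN is set,
+# "make doc" also refreshes the API documentation under $(BUILD)/doc/doxygen.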
+doc: $(BUILD)/doc/doxygen-stamp
+
+endif
diff --git a/methods/gzip.cc b/methods/gzip.cc
index a8e816bf3..f732c0b86 100644
--- a/methods/gzip.cc
+++ b/methods/gzip.cc
@@ -55,7 +55,7 @@ bool GzipMethod::Fetch(FetchItem *Itm)
// if the file is empty, just rename it and return
if(From.Size() == 0)
{
- Rename(Path, Itm->DestFile);
+ rename(Path.c_str(), Itm->DestFile.c_str());
return true;
}
diff --git a/methods/makefile b/methods/makefile
index d0b5a28c0..5bd05eae1 100644
--- a/methods/makefile
+++ b/methods/makefile
@@ -7,7 +7,7 @@ include ../buildlib/defaults.mak
BIN := $(BIN)/methods
# FIXME..
-LIB_APT_PKG_MAJOR = 3.11
+LIB_APT_PKG_MAJOR = 4.2
APT_DOMAIN := libapt-pkg$(LIB_APT_PKG_MAJOR)
# The file method