47 files changed, 1489 insertions, 566 deletions
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc index afb3daad3..10e80eb56 100644 --- a/apt-pkg/acquire-item.cc +++ b/apt-pkg/acquire-item.cc @@ -274,7 +274,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/ if(last_space != string::npos) Description.erase(last_space, Description.size()-last_space); new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc, - ExpectedHash, available_patches); + ExpectedHash, ServerSha1, available_patches); Complete = false; Status = StatDone; Dequeue(); @@ -342,9 +342,10 @@ void pkgAcqDiffIndex::Done(string Message,unsigned long Size,string Md5Hash, /*{ pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner, string URI,string URIDesc,string ShortDesc, HashString ExpectedHash, + string ServerSha1, vector<DiffInfo> diffs) : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash), - available_patches(diffs) + available_patches(diffs), ServerSha1(ServerSha1) { DestFile = _config->FindDir("Dir::State::lists") + "partial/"; @@ -430,6 +431,13 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/ std::clog << "QueueNextDiff: " << FinalFile << " (" << local_sha1 << ")"<<std::endl; + // final file reached before all patches are applied + if(local_sha1 == ServerSha1) + { + Finish(true); + return true; + } + // remove all patches until the next matching patch is found // this requires the Index file to be ordered for(vector<DiffInfo>::iterator I=available_patches.begin(); @@ -527,7 +535,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long Size,string Md5Hash, /* // see if there is more to download if(available_patches.size() > 0) { new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc, - ExpectedHash, available_patches); + ExpectedHash, ServerSha1, available_patches); return Finish(); } else return Finish(true); diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index 3f073de5b..d862d0fdd 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -422,6 +422,10 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item * off the front? */ vector<DiffInfo> available_patches; + + /** Stop applying patches when reaching that sha1 */ + string ServerSha1; + /** The current status of this patch. */ enum DiffState { @@ -475,6 +479,7 @@ class pkgAcqIndexDiffs : public pkgAcquire::Item */ pkgAcqIndexDiffs(pkgAcquire *Owner,string URI,string URIDesc, string ShortDesc, HashString ExpectedHash, + string ServerSha1, vector<DiffInfo> diffs=vector<DiffInfo>()); }; /*}}}*/ diff --git a/apt-pkg/aptconfiguration.cc b/apt-pkg/aptconfiguration.cc index 45ae9bed5..899004d9f 100644 --- a/apt-pkg/aptconfiguration.cc +++ b/apt-pkg/aptconfiguration.cc @@ -87,4 +87,140 @@ const Configuration::getCompressionTypes(bool const &Cached) { return types; } /*}}}*/ +// GetLanguages - Return Vector of Language Codes /*{{{*/ +// --------------------------------------------------------------------- +/* return a vector of language codes in the prefered order. + the special word "environment" will be replaced with the long and the short + code of the local settings and it will be insured that this will not add + duplicates. So in an german local the setting "environment, de_DE, en, de" + will result in "de_DE, de, en". 
+ The special word "none" is the stopcode for the not-All code vector */ +std::vector<std::string> const Configuration::getLanguages(bool const &All, + bool const &Cached, char const * const Locale) { + using std::string; + + // The detection is boring and has a lot of cornercases, + // so we cache the results to calculated it only once. + std::vector<string> static allCodes; + std::vector<string> static codes; + + // we have something in the cache + if (codes.empty() == false || allCodes.empty() == false) { + if (Cached == true) { + if(All == true && allCodes.empty() == false) + return allCodes; + else + return codes; + } else { + allCodes.clear(); + codes.clear(); + } + } + + // get the environment language code + // we extract both, a long and a short code and then we will + // check if we actually need both (rare) or if the short is enough + string const envMsg = string(Locale == 0 ? std::setlocale(LC_MESSAGES, NULL) : Locale); + size_t const lenShort = (envMsg.find('_') != string::npos) ? envMsg.find('_') : 2; + size_t const lenLong = (envMsg.find('.') != string::npos) ? envMsg.find('.') : (lenShort + 3); + + string envLong = envMsg.substr(0,lenLong); + string const envShort = envLong.substr(0,lenShort); + bool envLongIncluded = true, envShortIncluded = false; + + // first cornercase: LANG=C, so we use only "en" Translation + if (envLong == "C") { + codes.push_back("en"); + return codes; + } + + if (envLong != envShort) { + // to save the servers from unneeded queries, we only try also long codes + // for languages it is realistic to have a long code translation file... + char const *needLong[] = { "cs", "en", "pt", "sv", "zh", NULL }; + for (char const **l = needLong; *l != NULL; l++) + if (envShort.compare(*l) == 0) { + envLongIncluded = false; + break; + } + } + + // we don't add the long code, but we allow the user to do so + if (envLongIncluded == true) + envLong.clear(); + + // FIXME: Remove support for the old APT::Acquire::Translation + // it was undocumented and so it should be not very widthly used + string const oldAcquire = _config->Find("APT::Acquire::Translation",""); + if (oldAcquire.empty() == false && oldAcquire != "environment") { + if (oldAcquire != "none") + codes.push_back(oldAcquire); + return codes; + } + + // Support settings like Acquire::Translation=none on the command line to + // override the configuration settings vector of languages. + string const forceLang = _config->Find("Acquire::Languages",""); + if (forceLang.empty() == false) { + if (forceLang == "environment") { + if (envLongIncluded == false) + codes.push_back(envLong); + if (envShortIncluded == false) + codes.push_back(envShort); + return codes; + } else if (forceLang != "none") + codes.push_back(forceLang); + return codes; + } + + std::vector<string> const lang = _config->FindVector("Acquire::Languages"); + // the default setting -> "environment, en" + if (lang.empty() == true) { + if (envLongIncluded == false) + codes.push_back(envLong); + if (envShortIncluded == false) + codes.push_back(envShort); + if (envShort != "en") + codes.push_back("en"); + return codes; + } + + // the configs define the order, so add the environment + // then needed and ensure the codes are not listed twice. 
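// [ Editor's illustrative sketch, not part of this commit: a front-end that
//   wants the ordering described above would simply call the new API, e.g.
//      std::vector<std::string> const codes = APT::Configuration::getLanguages();
//   which in a de_DE.UTF-8 environment with the default configuration yields
//   "de", "en"; getLanguages(true) would additionally include any codes a user
//   has configured after the special word "none". ]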
+ bool noneSeen = false; + for (std::vector<string>::const_iterator l = lang.begin(); + l != lang.end(); l++) { + if (*l == "environment") { + if (envLongIncluded == true && envShortIncluded == true) + continue; + if (envLongIncluded == false) { + envLongIncluded = true; + if (noneSeen == false) + codes.push_back(envLong); + allCodes.push_back(envLong); + } + if (envShortIncluded == false) { + envShortIncluded = true; + if (noneSeen == false) + codes.push_back(envShort); + allCodes.push_back(envShort); + } + continue; + } else if (*l == "none") { + noneSeen = true; + continue; + } else if ((envLongIncluded == true && *l == envLong) || + (envShortIncluded == true && *l == envShort)) + continue; + + if (noneSeen == false) + codes.push_back(*l); + allCodes.push_back(*l); + } + if (All == true) + return allCodes; + else + return codes; +} + /*}}}*/ } diff --git a/apt-pkg/aptconfiguration.h b/apt-pkg/aptconfiguration.h index 6a123adce..f2f04a39b 100644 --- a/apt-pkg/aptconfiguration.h +++ b/apt-pkg/aptconfiguration.h @@ -39,6 +39,33 @@ public: /*{{{*/ * \return a vector of (all) Language Codes in the prefered usage order */ std::vector<std::string> static const getCompressionTypes(bool const &Cached = true); + + /** \brief Returns a vector of Language Codes + * + * Languages can be defined with their two or five chars long code. + * This methods handles the various ways to set the prefered codes, + * honors the environment and ensures that the codes are not listed twice. + * + * The special word "environment" will be replaced with the long and the short + * code of the local settings and it will be insured that this will not add + * duplicates. So in an german local the setting "environment, de_DE, en, de" + * will result in "de_DE, de, en". + * + * Another special word is "none" which separates the prefered from all codes + * in this setting. So setting and method can be used to get codes the user want + * to see or to get all language codes APT (should) have Translations available. + * + * \param All return all codes or only codes for languages we want to use + * \param Cached saves the result so we need to calculated it only once + * this parameter should ony be used for testing purposes. + * \param Locale don't get the locale from the system but use this one instead + * this parameter should ony be used for testing purposes. + * + * \return a vector of (all) Language Codes in the prefered usage order + */ + std::vector<std::string> static const getLanguages(bool const &All = false, + bool const &Cached = true, char const * const Locale = 0); + /*}}}*/ }; /*}}}*/ diff --git a/apt-pkg/contrib/configuration.cc b/apt-pkg/contrib/configuration.cc index 4e8586e83..ff49ce857 100644 --- a/apt-pkg/contrib/configuration.cc +++ b/apt-pkg/contrib/configuration.cc @@ -85,7 +85,7 @@ Configuration::~Configuration() /* This will lookup a single item by name below another item. 
It is a helper function for the main lookup function */ Configuration::Item *Configuration::Lookup(Item *Head,const char *S, - unsigned long Len,bool Create) + unsigned long const &Len,bool const &Create) { int Res = 1; Item *I = Head->Child; @@ -118,7 +118,7 @@ Configuration::Item *Configuration::Lookup(Item *Head,const char *S, // --------------------------------------------------------------------- /* This performs a fully scoped lookup of a given name, possibly creating new items */ -Configuration::Item *Configuration::Lookup(const char *Name,bool Create) +Configuration::Item *Configuration::Lookup(const char *Name,bool const &Create) { if (Name == 0) return Root->Child; @@ -245,7 +245,7 @@ vector<string> Configuration::FindVector(const char *Name) const // Configuration::FindI - Find an integer value /*{{{*/ // --------------------------------------------------------------------- /* */ -int Configuration::FindI(const char *Name,int Default) const +int Configuration::FindI(const char *Name,int const &Default) const { const Item *Itm = Lookup(Name); if (Itm == 0 || Itm->Value.empty() == true) @@ -262,7 +262,7 @@ int Configuration::FindI(const char *Name,int Default) const // Configuration::FindB - Find a boolean type /*{{{*/ // --------------------------------------------------------------------- /* */ -bool Configuration::FindB(const char *Name,bool Default) const +bool Configuration::FindB(const char *Name,bool const &Default) const { const Item *Itm = Lookup(Name); if (Itm == 0 || Itm->Value.empty() == true) @@ -338,7 +338,7 @@ void Configuration::Set(const char *Name,const string &Value) // Configuration::Set - Set an integer value /*{{{*/ // --------------------------------------------------------------------- /* */ -void Configuration::Set(const char *Name,int Value) +void Configuration::Set(const char *Name,int const &Value) { Item *Itm = Lookup(Name,true); if (Itm == 0) @@ -351,7 +351,7 @@ void Configuration::Set(const char *Name,int Value) // Configuration::Clear - Clear an single value from a list /*{{{*/ // --------------------------------------------------------------------- /* */ -void Configuration::Clear(const string Name, int Value) +void Configuration::Clear(string const &Name, int const &Value) { char S[300]; snprintf(S,sizeof(S),"%i",Value); @@ -361,7 +361,7 @@ void Configuration::Clear(const string Name, int Value) // Configuration::Clear - Clear an single value from a list /*{{{*/ // --------------------------------------------------------------------- /* */ -void Configuration::Clear(const string Name, string Value) +void Configuration::Clear(string const &Name, string const &Value) { Item *Top = Lookup(Name.c_str(),false); if (Top == 0 || Top->Child == 0) @@ -392,7 +392,7 @@ void Configuration::Clear(const string Name, string Value) // Configuration::Clear - Clear an entire tree /*{{{*/ // --------------------------------------------------------------------- /* */ -void Configuration::Clear(string Name) +void Configuration::Clear(string const &Name) { Item *Top = Lookup(Name.c_str(),false); if (Top == 0) @@ -507,8 +507,8 @@ string Configuration::Item::FullTag(const Item *Stop) const sections like 'zone "foo.org" { .. };' This causes each section to be added in with a tag like "zone::foo.org" instead of being split tag/value. 
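   [ Editor's illustrative note, not part of this commit: with AsSectional
     enabled a line such as
        zone "foo.org" { Master "ns1.foo.org"; };
     is stored under the tag "zone::foo.org", so a value could afterwards be
     read back roughly via Conf.Find("zone::foo.org::Master"). ]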
AsSectional enables Sectional parsing.*/ -bool ReadConfigFile(Configuration &Conf,const string &FName,bool AsSectional, - unsigned Depth) +bool ReadConfigFile(Configuration &Conf,const string &FName,bool const &AsSectional, + unsigned const &Depth) { // Open the stream for reading ifstream F(FName.c_str(),ios::in); @@ -835,8 +835,8 @@ bool ReadConfigFile(Configuration &Conf,const string &FName,bool AsSectional, // ReadConfigDir - Read a directory of config files /*{{{*/ // --------------------------------------------------------------------- /* */ -bool ReadConfigDir(Configuration &Conf,const string &Dir,bool AsSectional, - unsigned Depth) +bool ReadConfigDir(Configuration &Conf,const string &Dir,bool const &AsSectional, + unsigned const &Depth) { DIR *D = opendir(Dir.c_str()); if (D == 0) diff --git a/apt-pkg/contrib/configuration.h b/apt-pkg/contrib/configuration.h index e2da83f5b..2494c1d7c 100644 --- a/apt-pkg/contrib/configuration.h +++ b/apt-pkg/contrib/configuration.h @@ -58,8 +58,8 @@ class Configuration Item *Root; bool ToFree; - Item *Lookup(Item *Head,const char *S,unsigned long Len,bool Create); - Item *Lookup(const char *Name,bool Create); + Item *Lookup(Item *Head,const char *S,unsigned long const &Len,bool const &Create); + Item *Lookup(const char *Name,const bool &Create); inline const Item *Lookup(const char *Name) const { return ((Configuration *)this)->Lookup(Name,false); @@ -68,32 +68,33 @@ class Configuration public: string Find(const char *Name,const char *Default = 0) const; - string Find(const string Name,const char *Default = 0) const {return Find(Name.c_str(),Default);}; + string Find(string const &Name,const char *Default = 0) const {return Find(Name.c_str(),Default);}; + string Find(string const &Name, string const &Default) const {return Find(Name.c_str(),Default.c_str());}; string FindFile(const char *Name,const char *Default = 0) const; string FindDir(const char *Name,const char *Default = 0) const; - std::vector<string> FindVector(const string &Name) const; + std::vector<string> FindVector(string const &Name) const; std::vector<string> FindVector(const char *Name) const; - int FindI(const char *Name,int Default = 0) const; - int FindI(const string Name,int Default = 0) const {return FindI(Name.c_str(),Default);}; - bool FindB(const char *Name,bool Default = false) const; - bool FindB(const string Name,bool Default = false) const {return FindB(Name.c_str(),Default);}; + int FindI(const char *Name,int const &Default = 0) const; + int FindI(string const &Name,int const &Default = 0) const {return FindI(Name.c_str(),Default);}; + bool FindB(const char *Name,bool const &Default = false) const; + bool FindB(string const &Name,bool const &Default = false) const {return FindB(Name.c_str(),Default);}; string FindAny(const char *Name,const char *Default = 0) const; - inline void Set(const string Name,string Value) {Set(Name.c_str(),Value);}; + inline void Set(const string &Name,const string &Value) {Set(Name.c_str(),Value);}; void CndSet(const char *Name,const string &Value); void Set(const char *Name,const string &Value); - void Set(const char *Name,int Value); + void Set(const char *Name,const int &Value); - inline bool Exists(const string Name) const {return Exists(Name.c_str());}; + inline bool Exists(const string &Name) const {return Exists(Name.c_str());}; bool Exists(const char *Name) const; bool ExistsAny(const char *Name) const; // clear a whole tree - void Clear(const string Name); + void Clear(const string &Name); // remove a certain value from a list (e.g. 
the list of "APT::Keep-Fds") - void Clear(const string List, string Value); - void Clear(const string List, int Value); + void Clear(string const &List, string const &Value); + void Clear(string const &List, int const &Value); inline const Item *Tree(const char *Name) const {return Lookup(Name);}; @@ -108,11 +109,11 @@ class Configuration extern Configuration *_config; bool ReadConfigFile(Configuration &Conf,const string &FName, - bool AsSectional = false, - unsigned Depth = 0); + bool const &AsSectional = false, + unsigned const &Depth = 0); bool ReadConfigDir(Configuration &Conf,const string &Dir, - bool AsSectional = false, - unsigned Depth = 0); + bool const &AsSectional = false, + unsigned const &Depth = 0); #endif diff --git a/apt-pkg/contrib/mmap.cc b/apt-pkg/contrib/mmap.cc index 4d5fcf71e..f440f9489 100644 --- a/apt-pkg/contrib/mmap.cc +++ b/apt-pkg/contrib/mmap.cc @@ -140,8 +140,10 @@ bool MMap::Sync(unsigned long Start,unsigned long Stop) // DynamicMMap::DynamicMMap - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -DynamicMMap::DynamicMMap(FileFd &F,unsigned long Flags,unsigned long WorkSpace) : - MMap(F,Flags | NoImmMap), Fd(&F), WorkSpace(WorkSpace) +DynamicMMap::DynamicMMap(FileFd &F,unsigned long Flags,unsigned long const &Workspace, + unsigned long const &Grow, unsigned long const &Limit) : + MMap(F,Flags | NoImmMap), Fd(&F), WorkSpace(Workspace), + GrowFactor(Grow), Limit(Limit) { if (_error->PendingError() == true) return; @@ -165,32 +167,48 @@ DynamicMMap::DynamicMMap(FileFd &F,unsigned long Flags,unsigned long WorkSpace) /* We try here to use mmap to reserve some space - this is much more cooler than the fallback solution to simply allocate a char array and could come in handy later than we are able to grow such an mmap */ -DynamicMMap::DynamicMMap(unsigned long Flags,unsigned long WorkSpace) : - MMap(Flags | NoImmMap | UnMapped), Fd(0), WorkSpace(WorkSpace) +DynamicMMap::DynamicMMap(unsigned long Flags,unsigned long const &WorkSpace, + unsigned long const &Grow, unsigned long const &Limit) : + MMap(Flags | NoImmMap | UnMapped), Fd(0), WorkSpace(WorkSpace), + GrowFactor(Grow), Limit(Limit) { - if (_error->PendingError() == true) - return; + if (_error->PendingError() == true) + return; + + // disable Moveable if we don't grow + if (Grow == 0) + Flags &= ~Moveable; + +#ifndef __linux__ + // kfreebsd doesn't have mremap, so we use the fallback + if ((Flags & Moveable) == Moveable) + Flags |= Fallback; +#endif #ifdef _POSIX_MAPPED_FILES - // Set the permissions. - int Prot = PROT_READ; - int Map = MAP_PRIVATE | MAP_ANONYMOUS; - if ((Flags & ReadOnly) != ReadOnly) - Prot |= PROT_WRITE; - if ((Flags & Public) == Public) - Map = MAP_SHARED | MAP_ANONYMOUS; + if ((Flags & Fallback) != Fallback) { + // Set the permissions. 
+ int Prot = PROT_READ; + int Map = MAP_PRIVATE | MAP_ANONYMOUS; + if ((Flags & ReadOnly) != ReadOnly) + Prot |= PROT_WRITE; + if ((Flags & Public) == Public) + Map = MAP_SHARED | MAP_ANONYMOUS; - // use anonymous mmap() to get the memory - Base = (unsigned char*) mmap(0, WorkSpace, Prot, Map, -1, 0); + // use anonymous mmap() to get the memory + Base = (unsigned char*) mmap(0, WorkSpace, Prot, Map, -1, 0); - if(Base == MAP_FAILED) - _error->Errno("DynamicMMap",_("Couldn't make mmap of %lu bytes"),WorkSpace); -#else - // fallback to a static allocated space - Base = new unsigned char[WorkSpace]; - memset(Base,0,WorkSpace); + if(Base == MAP_FAILED) + _error->Errno("DynamicMMap",_("Couldn't make mmap of %lu bytes"),WorkSpace); + + iSize = 0; + return; + } #endif - iSize = 0; + // fallback to a static allocated space + Base = new unsigned char[WorkSpace]; + memset(Base,0,WorkSpace); + iSize = 0; } /*}}}*/ // DynamicMMap::~DynamicMMap - Destructor /*{{{*/ @@ -311,30 +329,55 @@ unsigned long DynamicMMap::WriteString(const char *String, /*}}}*/ // DynamicMMap::Grow - Grow the mmap /*{{{*/ // --------------------------------------------------------------------- -/* This method will try to grow the mmap we currently use. This doesn't - work most of the time because we can't move the mmap around in the - memory for now as this would require to adjust quite a lot of pointers - but why we should not at least try to grow it before we give up? */ -bool DynamicMMap::Grow() -{ -#if defined(_POSIX_MAPPED_FILES) && defined(__linux__) - unsigned long newSize = WorkSpace + 1024*1024; +/* This method is a wrapper around different methods to (try to) grow + a mmap (or our char[]-fallback). Encounterable environments: + 1. Moveable + !Fallback + linux -> mremap with MREMAP_MAYMOVE + 2. Moveable + !Fallback + !linux -> not possible (forbidden by constructor) + 3. Moveable + Fallback -> realloc + 4. !Moveable + !Fallback + linux -> mremap alone - which will fail in 99,9% + 5. !Moveable + !Fallback + !linux -> not possible (forbidden by constructor) + 6. !Moveable + Fallback -> not possible + [ While Moveable and Fallback stands for the equally named flags and + "linux" indicates a linux kernel instead of a freebsd kernel. ] + So what you can see here is, that a MMAP which want to be growable need + to be moveable to have a real chance but that this method will at least try + the nearly impossible 4 to grow it before it finally give up: Never say never. 
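   [ Editor's illustrative sketch, not part of this commit: a growable
     anonymous mapping matching case 1 could be requested roughly like
        DynamicMMap Map(MMap::Public | MMap::Moveable);
     using the default 2 MB workspace; on non-Linux kernels the constructor
     above adds the Fallback flag itself, turning this into case 3 (realloc). ]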
*/ +bool DynamicMMap::Grow() { + if (Limit != 0 && WorkSpace >= Limit) + return _error->Error(_("The size of a MMap has already reached the defined limit of %lu bytes," + "abort the try to grow the MMap."), Limit); - if(Fd != 0) - { - Fd->Seek(newSize - 1); - char C = 0; - Fd->Write(&C,sizeof(C)); - } + unsigned long const newSize = WorkSpace + 1024*1024; - Base = mremap(Base, WorkSpace, newSize, 0); - if(Base == MAP_FAILED) - return false; + if(Fd != 0) { + Fd->Seek(newSize - 1); + char C = 0; + Fd->Write(&C,sizeof(C)); + } + if ((Flags & Fallback) != Fallback) { +#if defined(_POSIX_MAPPED_FILES) && defined(__linux__) + #ifdef MREMAP_MAYMOVE + if ((Flags & Moveable) == Moveable) + Base = mremap(Base, WorkSpace, newSize, MREMAP_MAYMOVE); + else + #endif + Base = mremap(Base, WorkSpace, newSize, 0); - WorkSpace = newSize; - return true; + if(Base == MAP_FAILED) + return false; #else - return false; + return false; #endif + } else { + if ((Flags & Moveable) != Moveable) + return false; + + Base = realloc(Base, newSize); + if (Base == NULL) + return false; + } + + WorkSpace = newSize; + return true; } /*}}}*/ diff --git a/apt-pkg/contrib/mmap.h b/apt-pkg/contrib/mmap.h index bde62217d..cd2b15ba2 100644 --- a/apt-pkg/contrib/mmap.h +++ b/apt-pkg/contrib/mmap.h @@ -50,7 +50,7 @@ class MMap public: enum OpenFlags {NoImmMap = (1<<0),Public = (1<<1),ReadOnly = (1<<2), - UnMapped = (1<<3)}; + UnMapped = (1<<3), Moveable = (1<<4), Fallback = (1 << 5)}; // Simple accessors inline operator void *() {return Base;}; @@ -82,6 +82,8 @@ class DynamicMMap : public MMap FileFd *Fd; unsigned long WorkSpace; + unsigned long const GrowFactor; + unsigned long const Limit; Pool *Pools; unsigned int PoolCount; @@ -96,8 +98,10 @@ class DynamicMMap : public MMap inline unsigned long WriteString(const string &S) {return WriteString(S.c_str(),S.length());}; void UsePools(Pool &P,unsigned int Count) {Pools = &P; PoolCount = Count;}; - DynamicMMap(FileFd &F,unsigned long Flags,unsigned long WorkSpace = 2*1024*1024); - DynamicMMap(unsigned long Flags,unsigned long WorkSpace = 2*1024*1024); + DynamicMMap(FileFd &F,unsigned long Flags,unsigned long const &WorkSpace = 2*1024*1024, + unsigned long const &Grow = 1024*1024, unsigned long const &Limit = 0); + DynamicMMap(unsigned long Flags,unsigned long const &WorkSpace = 2*1024*1024, + unsigned long const &Grow = 1024*1024, unsigned long const &Limit = 0); virtual ~DynamicMMap(); }; diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc index ed7633803..5beb83665 100644 --- a/apt-pkg/deb/debindexfile.cc +++ b/apt-pkg/deb/debindexfile.cc @@ -319,10 +319,11 @@ pkgCache::PkgFileIterator debPackagesIndex::FindInCache(pkgCache &Cache) const // TranslationsIndex::debTranslationsIndex - Contructor /*{{{*/ // --------------------------------------------------------------------- /* */ -debTranslationsIndex::debTranslationsIndex(string URI,string Dist,string Section) : - pkgIndexFile(true), URI(URI), Dist(Dist), Section(Section) -{ -} +debTranslationsIndex::debTranslationsIndex(string URI,string Dist,string Section, + char const * const Translation) : + pkgIndexFile(true), URI(URI), Dist(Dist), Section(Section), + Language(Translation) +{} /*}}}*/ // TranslationIndex::Trans* - Return the URI to the translation files /*{{{*/ // --------------------------------------------------------------------- @@ -355,8 +356,8 @@ string debTranslationsIndex::IndexURI(const char *Type) const bool debTranslationsIndex::GetIndexes(pkgAcquire *Owner) const { if (TranslationsAvailable()) { 
- string TranslationFile = "Translation-" + LanguageCode(); - new pkgAcqIndexTrans(Owner, IndexURI(LanguageCode().c_str()), + string const TranslationFile = string("Translation-").append(Language); + new pkgAcqIndexTrans(Owner, IndexURI(Language), Info(TranslationFile.c_str()), TranslationFile); } @@ -375,7 +376,7 @@ string debTranslationsIndex::Describe(bool Short) const snprintf(S,sizeof(S),"%s",Info(TranslationFile().c_str()).c_str()); else snprintf(S,sizeof(S),"%s (%s)",Info(TranslationFile().c_str()).c_str(), - IndexFile(LanguageCode().c_str()).c_str()); + IndexFile(Language).c_str()); return S; } /*}}}*/ @@ -397,20 +398,20 @@ string debTranslationsIndex::Info(const char *Type) const return Info; } /*}}}*/ -bool debTranslationsIndex::HasPackages() const +bool debTranslationsIndex::HasPackages() const /*{{{*/ { if(!TranslationsAvailable()) return false; - return FileExists(IndexFile(LanguageCode().c_str())); + return FileExists(IndexFile(Language)); } - + /*}}}*/ // TranslationsIndex::Exists - Check if the index is available /*{{{*/ // --------------------------------------------------------------------- /* */ bool debTranslationsIndex::Exists() const { - return FileExists(IndexFile(LanguageCode().c_str())); + return FileExists(IndexFile(Language)); } /*}}}*/ // TranslationsIndex::Size - Return the size of the index /*{{{*/ @@ -419,7 +420,7 @@ bool debTranslationsIndex::Exists() const unsigned long debTranslationsIndex::Size() const { struct stat S; - if (stat(IndexFile(LanguageCode().c_str()).c_str(),&S) != 0) + if (stat(IndexFile(Language).c_str(),&S) != 0) return 0; return S.st_size; } @@ -430,7 +431,7 @@ unsigned long debTranslationsIndex::Size() const bool debTranslationsIndex::Merge(pkgCacheGenerator &Gen,OpProgress &Prog) const { // Check the translation file, if in use - string TranslationFile = IndexFile(LanguageCode().c_str()); + string TranslationFile = IndexFile(Language); if (TranslationsAvailable() && FileExists(TranslationFile)) { FileFd Trans(TranslationFile,FileFd::ReadOnly); @@ -462,7 +463,7 @@ bool debTranslationsIndex::Merge(pkgCacheGenerator &Gen,OpProgress &Prog) const /* */ pkgCache::PkgFileIterator debTranslationsIndex::FindInCache(pkgCache &Cache) const { - string FileName = IndexFile(LanguageCode().c_str()); + string FileName = IndexFile(Language); pkgCache::PkgFileIterator File = Cache.FileBegin(); for (; File.end() == false; File++) diff --git a/apt-pkg/deb/debindexfile.h b/apt-pkg/deb/debindexfile.h index b0012c96b..c0e8d7d8e 100644 --- a/apt-pkg/deb/debindexfile.h +++ b/apt-pkg/deb/debindexfile.h @@ -77,12 +77,13 @@ class debTranslationsIndex : public pkgIndexFile string URI; string Dist; string Section; + const char * const Language; string Info(const char *Type) const; string IndexFile(const char *Type) const; string IndexURI(const char *Type) const; - inline string TranslationFile() const {return "Translation-" + LanguageCode();}; + inline string TranslationFile() const {return string("Translation-").append(Language);}; public: @@ -99,7 +100,7 @@ class debTranslationsIndex : public pkgIndexFile virtual bool Merge(pkgCacheGenerator &Gen,OpProgress &Prog) const; virtual pkgCache::PkgFileIterator FindInCache(pkgCache &Cache) const; - debTranslationsIndex(string URI,string Dist,string Section); + debTranslationsIndex(string URI,string Dist,string Section, char const * const Language); }; class debSourcesIndex : public pkgIndexFile diff --git a/apt-pkg/deb/deblistparser.cc b/apt-pkg/deb/deblistparser.cc index 517b771a5..25a1df3f9 100644 --- 
a/apt-pkg/deb/deblistparser.cc +++ b/apt-pkg/deb/deblistparser.cc @@ -13,6 +13,7 @@ #include <apt-pkg/deblistparser.h> #include <apt-pkg/error.h> #include <apt-pkg/configuration.h> +#include <apt-pkg/aptconfiguration.h> #include <apt-pkg/strutl.h> #include <apt-pkg/crc-16.h> #include <apt-pkg/md5.h> @@ -129,10 +130,11 @@ bool debListParser::NewVersion(pkgCache::VerIterator Ver) only describe package properties */ string debListParser::Description() { - if (DescriptionLanguage().empty()) + string const lang = DescriptionLanguage(); + if (lang.empty()) return Section.FindS("Description"); else - return Section.FindS(("Description-" + pkgIndexFile::LanguageCode()).c_str()); + return Section.FindS(string("Description-").append(lang).c_str()); } /*}}}*/ // ListParser::DescriptionLanguage - Return the description lang string /*{{{*/ @@ -142,7 +144,16 @@ string debListParser::Description() assumed to describe original description. */ string debListParser::DescriptionLanguage() { - return Section.FindS("Description").empty() ? pkgIndexFile::LanguageCode() : ""; + if (Section.FindS("Description").empty() == false) + return ""; + + std::vector<string> const lang = APT::Configuration::getLanguages(); + for (std::vector<string>::const_iterator l = lang.begin(); + l != lang.end(); l++) + if (Section.FindS(string("Description-").append(*l).c_str()).empty() == false) + return *l; + + return ""; } /*}}}*/ // ListParser::Description - Return the description_md5 MD5SumValue /*{{{*/ @@ -384,7 +395,8 @@ const char *debListParser::ConvertRelation(const char *I,unsigned int &Op) bit by bit. */ const char *debListParser::ParseDepends(const char *Start,const char *Stop, string &Package,string &Ver, - unsigned int &Op, bool ParseArchFlags) + unsigned int &Op, bool const &ParseArchFlags, + bool const &StripMultiArch) { // Strip off leading space for (;Start != Stop && isspace(*Start) != 0; Start++); @@ -403,7 +415,14 @@ const char *debListParser::ParseDepends(const char *Start,const char *Stop, // Stash the package name Package.assign(Start,I - Start); - + + // We don't want to confuse library users which can't handle MultiArch + if (StripMultiArch == true) { + size_t const found = Package.rfind(':'); + if (found != string::npos) + Package = Package.substr(0,found); + } + // Skip white space to the '(' for (;I != Stop && isspace(*I) != 0 ; I++); diff --git a/apt-pkg/deb/deblistparser.h b/apt-pkg/deb/deblistparser.h index 34bb29c72..1c709229f 100644 --- a/apt-pkg/deb/deblistparser.h +++ b/apt-pkg/deb/deblistparser.h @@ -64,7 +64,8 @@ class debListParser : public pkgCacheGenerator::ListParser static const char *ParseDepends(const char *Start,const char *Stop, string &Package,string &Ver,unsigned int &Op, - bool ParseArchFlags = false); + bool const &ParseArchFlags = false, + bool const &StripMultiArch = false); static const char *ConvertRelation(const char *I,unsigned int &Op); debListParser(FileFd *File); diff --git a/apt-pkg/deb/debmetaindex.cc b/apt-pkg/deb/debmetaindex.cc index f3ab6960c..8f28f053b 100644 --- a/apt-pkg/deb/debmetaindex.cc +++ b/apt-pkg/deb/debmetaindex.cc @@ -5,6 +5,7 @@ #include <apt-pkg/strutl.h> #include <apt-pkg/acquire-item.h> #include <apt-pkg/configuration.h> +#include <apt-pkg/aptconfiguration.h> #include <apt-pkg/error.h> using namespace std; @@ -170,13 +171,19 @@ bool debReleaseIndex::GetIndexes(pkgAcquire *Owner, bool GetAll) const new indexRecords (Dist)); // Queue the translations + std::vector<std::string> const lang = APT::Configuration::getLanguages(true); for (vector<const 
debSectionEntry *>::const_iterator I = SectionEntries.begin(); I != SectionEntries.end(); I++) { if((*I)->IsSrc) continue; - debTranslationsIndex i = debTranslationsIndex(URI,Dist,(*I)->Section); - i.GetIndexes(Owner); + + for (vector<string>::const_iterator l = lang.begin(); + l != lang.end(); l++) + { + debTranslationsIndex i = debTranslationsIndex(URI,Dist,(*I)->Section,(*l).c_str()); + i.GetIndexes(Owner); + } } return true; @@ -202,6 +209,7 @@ vector <pkgIndexFile *> *debReleaseIndex::GetIndexFiles() return Indexes; Indexes = new vector <pkgIndexFile*>; + std::vector<std::string> const lang = APT::Configuration::getLanguages(true); for (vector<const debSectionEntry *>::const_iterator I = SectionEntries.begin(); I != SectionEntries.end(); I++) { if ((*I)->IsSrc) @@ -209,7 +217,10 @@ vector <pkgIndexFile *> *debReleaseIndex::GetIndexFiles() else { Indexes->push_back(new debPackagesIndex (URI, Dist, (*I)->Section, IsTrusted())); - Indexes->push_back(new debTranslationsIndex(URI, Dist, (*I)->Section)); + + for (vector<string>::const_iterator l = lang.begin(); + l != lang.end(); l++) + Indexes->push_back(new debTranslationsIndex(URI,Dist,(*I)->Section,(*l).c_str())); } } diff --git a/apt-pkg/deb/debrecords.cc b/apt-pkg/deb/debrecords.cc index 8ed0bb7eb..5b8538a46 100644 --- a/apt-pkg/deb/debrecords.cc +++ b/apt-pkg/deb/debrecords.cc @@ -11,6 +11,7 @@ #include <apt-pkg/debrecords.h> #include <apt-pkg/strutl.h> #include <apt-pkg/error.h> +#include <apt-pkg/aptconfiguration.h> #include <langinfo.h> /*}}}*/ @@ -109,13 +110,18 @@ string debRecordParser::ShortDesc() string debRecordParser::LongDesc() { string orig, dest; - char *codeset = nl_langinfo(CODESET); if (!Section.FindS("Description").empty()) orig = Section.FindS("Description").c_str(); - else - orig = Section.FindS(("Description-" + pkgIndexFile::LanguageCode()).c_str()).c_str(); + else + { + vector<string> const lang = APT::Configuration::getLanguages(); + for (vector<string>::const_iterator l = lang.begin(); + orig.empty() && l != lang.end(); l++) + orig = Section.FindS(string("Description-").append(*l).c_str()); + } + char const * const codeset = nl_langinfo(CODESET); if (strcmp(codeset,"UTF-8") != 0) { UTF8ToCodeset(codeset, orig, &dest); orig = dest; diff --git a/apt-pkg/deb/debsrcrecords.cc b/apt-pkg/deb/debsrcrecords.cc index bde10aa6d..21336e1af 100644 --- a/apt-pkg/deb/debsrcrecords.cc +++ b/apt-pkg/deb/debsrcrecords.cc @@ -54,7 +54,8 @@ const char **debSrcRecordParser::Binaries() package/version records representing the build dependency. 
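   [ Editor's illustrative note, not part of this commit: with the new
     StripMultiArch parameter defaulting to true, a build dependency such as
        libfoo-dev:native (>= 1.0)
     comes back from ParseDepends() with rec.Package == "libfoo-dev", because
     everything from the last ':' onwards is dropped before the version is
     parsed (see the deblistparser.cc hunk above). ]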
The returned array need not be freed and will be reused by the next call to this function */ -bool debSrcRecordParser::BuildDepends(vector<pkgSrcRecords::Parser::BuildDepRec> &BuildDeps, bool ArchOnly) +bool debSrcRecordParser::BuildDepends(vector<pkgSrcRecords::Parser::BuildDepRec> &BuildDeps, + bool const &ArchOnly, bool const &StripMultiArch) { unsigned int I; const char *Start, *Stop; @@ -77,7 +78,7 @@ bool debSrcRecordParser::BuildDepends(vector<pkgSrcRecords::Parser::BuildDepRec> while (1) { Start = debListParser::ParseDepends(Start, Stop, - rec.Package,rec.Version,rec.Op,true); + rec.Package,rec.Version,rec.Op,true, StripMultiArch); if (Start == 0) return _error->Error("Problem parsing dependency: %s", fields[I]); diff --git a/apt-pkg/deb/debsrcrecords.h b/apt-pkg/deb/debsrcrecords.h index a3b5a8286..c39d78bae 100644 --- a/apt-pkg/deb/debsrcrecords.h +++ b/apt-pkg/deb/debsrcrecords.h @@ -30,14 +30,14 @@ class debSrcRecordParser : public pkgSrcRecords::Parser virtual bool Restart() {return Tags.Jump(Sect,0);}; virtual bool Step() {iOffset = Tags.Offset(); return Tags.Step(Sect);}; - virtual bool Jump(unsigned long Off) {iOffset = Off; return Tags.Jump(Sect,Off);}; + virtual bool Jump(unsigned long const &Off) {iOffset = Off; return Tags.Jump(Sect,Off);}; virtual string Package() const {return Sect.FindS("Package");}; virtual string Version() const {return Sect.FindS("Version");}; virtual string Maintainer() const {return Sect.FindS("Maintainer");}; virtual string Section() const {return Sect.FindS("Section");}; virtual const char **Binaries(); - virtual bool BuildDepends(vector<BuildDepRec> &BuildDeps, bool ArchOnly); + virtual bool BuildDepends(vector<BuildDepRec> &BuildDeps, bool const &ArchOnly, bool const &StripMultiArch = true); virtual unsigned long Offset() {return iOffset;}; virtual string AsStr() { @@ -47,7 +47,7 @@ class debSrcRecordParser : public pkgSrcRecords::Parser }; virtual bool Files(vector<pkgSrcRecords::File> &F); - debSrcRecordParser(string File,pkgIndexFile const *Index) + debSrcRecordParser(string const &File,pkgIndexFile const *Index) : Parser(Index), Fd(File,FileFd::ReadOnly), Tags(&Fd,102400), Buffer(0), BufSize(0) {} ~debSrcRecordParser(); diff --git a/apt-pkg/indexfile.cc b/apt-pkg/indexfile.cc index 08f71feb0..37be87055 100644 --- a/apt-pkg/indexfile.cc +++ b/apt-pkg/indexfile.cc @@ -8,9 +8,9 @@ ##################################################################### */ /*}}}*/ // Include Files /*{{{*/ -#include <apt-pkg/configuration.h> #include <apt-pkg/indexfile.h> #include <apt-pkg/error.h> +#include <apt-pkg/aptconfiguration.h> #include <clocale> #include <cstring> @@ -66,28 +66,20 @@ string pkgIndexFile::SourceInfo(pkgSrcRecords::Parser const &Record, return string(); } /*}}}*/ -// IndexFile::TranslationsAvailable - Check if will use Translation /*{{{*/ +// IndexFile::TranslationsAvailable - Check if will use Translation /*{{{*/ // --------------------------------------------------------------------- /* */ -bool pkgIndexFile::TranslationsAvailable() -{ - const string Translation = _config->Find("APT::Acquire::Translation"); - - if (Translation.compare("none") != 0) - return CheckLanguageCode(LanguageCode().c_str()); - else - return false; +bool pkgIndexFile::TranslationsAvailable() { + return (APT::Configuration::getLanguages().empty() != true); } /*}}}*/ -// IndexFile::CheckLanguageCode - Check the Language Code /*{{{*/ +// IndexFile::CheckLanguageCode - Check the Language Code /*{{{*/ // 
--------------------------------------------------------------------- -/* */ -/* common cases: de_DE, de_DE@euro, de_DE.UTF-8, de_DE.UTF-8@euro, - de_DE.ISO8859-1, tig_ER - more in /etc/gdm/locale.conf -*/ - -bool pkgIndexFile::CheckLanguageCode(const char *Lang) +/* No intern need for this method anymore as the check for correctness + is already done in getLanguages(). Note also that this check is + rather bad (doesn't take three character like ast into account). + TODO: Remove method with next API break */ +__attribute__ ((deprecated)) bool pkgIndexFile::CheckLanguageCode(const char *Lang) { if (strlen(Lang) == 2 || (strlen(Lang) == 5 && Lang[2] == '_')) return true; @@ -98,31 +90,14 @@ bool pkgIndexFile::CheckLanguageCode(const char *Lang) return false; } /*}}}*/ -// IndexFile::LanguageCode - Return the Language Code /*{{{*/ +// IndexFile::LanguageCode - Return the Language Code /*{{{*/ // --------------------------------------------------------------------- -/* return the language code */ -string pkgIndexFile::LanguageCode() -{ - const string Translation = _config->Find("APT::Acquire::Translation"); - - if (Translation.compare("environment") == 0) - { - string lang = std::setlocale(LC_MESSAGES,NULL); - - // we have a mapping of the language codes that contains all the language - // codes that need the country code as well - // (like pt_BR, pt_PT, sv_SE, zh_*, en_*) - const char *need_full_langcode[] = { "pt","sv","zh","en", NULL }; - for(const char **s = need_full_langcode;*s != NULL; s++) - if(lang.find(*s) == 0) - return lang.substr(0,5); - - if(lang.size() > 2) - return lang.substr(0,2); - else - return lang; - } - else - return Translation; +/* As we have now possibly more than one LanguageCode this method is + supersided by a) private classmembers or b) getLanguages(). 
+ TODO: Remove method with next API break */ +__attribute__ ((deprecated)) string pkgIndexFile::LanguageCode() { + if (TranslationsAvailable() == false) + return ""; + return APT::Configuration::getLanguages()[0]; } /*}}}*/ diff --git a/apt-pkg/pkgcache.cc b/apt-pkg/pkgcache.cc index b0ce6e598..997ff51fe 100644 --- a/apt-pkg/pkgcache.cc +++ b/apt-pkg/pkgcache.cc @@ -22,11 +22,11 @@ // Include Files /*{{{*/ #include <apt-pkg/pkgcache.h> #include <apt-pkg/policy.h> -#include <apt-pkg/indexfile.h> #include <apt-pkg/version.h> #include <apt-pkg/error.h> #include <apt-pkg/strutl.h> #include <apt-pkg/configuration.h> +#include <apt-pkg/aptconfiguration.h> #include <apti18n.h> @@ -674,14 +674,22 @@ string pkgCache::PkgFileIterator::RelStr() */ pkgCache::DescIterator pkgCache::VerIterator::TranslatedDescription() const { - pkgCache::DescIterator DescDefault = DescriptionList(); - pkgCache::DescIterator Desc = DescDefault; - for (; Desc.end() == false; Desc++) - if (pkgIndexFile::LanguageCode() == Desc.LanguageCode()) - break; - if (Desc.end() == true) - Desc = DescDefault; - return Desc; + std::vector<string> const lang = APT::Configuration::getLanguages(); + for (std::vector<string>::const_iterator l = lang.begin(); + l != lang.end(); l++) + { + pkgCache::DescIterator DescDefault = DescriptionList(); + pkgCache::DescIterator Desc = DescDefault; + + for (; Desc.end() == false; Desc++) + if (*l == Desc.LanguageCode()) + break; + if (Desc.end() == true) + Desc = DescDefault; + return Desc; + } + + return DescriptionList(); }; /*}}}*/ diff --git a/apt-pkg/pkgcache.h b/apt-pkg/pkgcache.h index 38733713f..e8a3e1064 100644 --- a/apt-pkg/pkgcache.h +++ b/apt-pkg/pkgcache.h @@ -245,7 +245,7 @@ struct pkgCache::VerFile /*{{{*/ map_ptrloc File; // PackageFile map_ptrloc NextFile; // PkgVerFile map_ptrloc Offset; // File offset - unsigned short Size; + unsigned long Size; }; /*}}}*/ struct pkgCache::DescFile /*{{{*/ @@ -253,7 +253,7 @@ struct pkgCache::DescFile /*{{{*/ map_ptrloc File; // PackageFile map_ptrloc NextFile; // PkgVerFile map_ptrloc Offset; // File offset - unsigned short Size; + unsigned long Size; }; /*}}}*/ struct pkgCache::Version /*{{{*/ diff --git a/apt-pkg/srcrecords.cc b/apt-pkg/srcrecords.cc index 5e40ae624..46a02b55c 100644 --- a/apt-pkg/srcrecords.cc +++ b/apt-pkg/srcrecords.cc @@ -77,7 +77,7 @@ bool pkgSrcRecords::Restart() /* This searches on both source package names and output binary names and returns the first found. 
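   [ Editor's illustrative sketch, not part of this commit: typical cursor
     usage, assuming an already initialised pkgSourceList named List, is
        pkgSrcRecords Recs(List);
        while (pkgSrcRecords::Parser *P = Recs.Find("apt"))
           std::cout << P->Package() << ' ' << P->Version() << std::endl;
     where Find() returns 0 once every matching record has been returned. ]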
A 'cursor' like system is used to allow this function to be called multiple times to get successive entries */ -pkgSrcRecords::Parser *pkgSrcRecords::Find(const char *Package,bool SrcOnly) +pkgSrcRecords::Parser *pkgSrcRecords::Find(const char *Package,bool const &SrcOnly) { if (Current == Files.end()) return 0; @@ -116,7 +116,7 @@ pkgSrcRecords::Parser *pkgSrcRecords::Find(const char *Package,bool SrcOnly) // Parser::BuildDepType - Convert a build dep to a string /*{{{*/ // --------------------------------------------------------------------- /* */ -const char *pkgSrcRecords::Parser::BuildDepType(unsigned char Type) +const char *pkgSrcRecords::Parser::BuildDepType(unsigned char const &Type) { const char *fields[] = {"Build-Depends", "Build-Depends-Indep", diff --git a/apt-pkg/srcrecords.h b/apt-pkg/srcrecords.h index 99cbc6060..a49533864 100644 --- a/apt-pkg/srcrecords.h +++ b/apt-pkg/srcrecords.h @@ -59,7 +59,7 @@ class pkgSrcRecords virtual bool Restart() = 0; virtual bool Step() = 0; - virtual bool Jump(unsigned long Off) = 0; + virtual bool Jump(unsigned long const &Off) = 0; virtual unsigned long Offset() = 0; virtual string AsStr() = 0; @@ -69,8 +69,8 @@ class pkgSrcRecords virtual string Section() const = 0; virtual const char **Binaries() = 0; // Ownership does not transfer - virtual bool BuildDepends(vector<BuildDepRec> &BuildDeps, bool ArchOnly) = 0; - static const char *BuildDepType(unsigned char Type); + virtual bool BuildDepends(vector<BuildDepRec> &BuildDeps, bool const &ArchOnly, bool const &StripMultiArch = true) = 0; + static const char *BuildDepType(unsigned char const &Type); virtual bool Files(vector<pkgSrcRecords::File> &F) = 0; @@ -90,7 +90,7 @@ class pkgSrcRecords bool Restart(); // Locate a package by name - Parser *Find(const char *Package,bool SrcOnly = false); + Parser *Find(const char *Package,bool const &SrcOnly = false); pkgSrcRecords(pkgSourceList &List); ~pkgSrcRecords(); diff --git a/debian/apt.cron.daily b/debian/apt.cron.daily index b6099ee75..e59b05ee4 100644 --- a/debian/apt.cron.daily +++ b/debian/apt.cron.daily @@ -401,12 +401,16 @@ eval $(apt-config shell BackupArchiveInterval APT::Periodic::BackupArchiveInterv Debdelta=1 eval $(apt-config shell Debdelta APT::Periodic::Download-Upgradeable-Packages-Debdelta) -# check if we actually have to do anything +# check if we actually have to do anything that requires locking the cache if [ $UpdateInterval -eq 0 ] && [ $DownloadUpgradeableInterval -eq 0 ] && [ $UnattendedUpgradeInterval -eq 0 ] && [ $BackupArchiveInterval -eq 0 ] && [ $AutocleanInterval -eq 0 ]; then + + # check cache size + check_size_constraints + exit 0 fi diff --git a/debian/changelog b/debian/changelog index 64e676709..cb46394b5 100644 --- a/debian/changelog +++ b/debian/changelog @@ -1,3 +1,45 @@ +apt (0.7.26) UNRELEASED; urgency=low + + [ David Kalnischkies ] + * [BREAK] add possibility to download and use multiply + Translation files, configurable with Acquire::Translation + (Closes: #444222, #448216, #550564) + * Ignore :qualifiers after package name in build dependencies + for now as long we don't understand them (Closes: #558103) + * doc/apt.conf.5.xml: + - briefly document the behaviour of the new https options + * methods/connect.cc: + - add AI_ADDRCONFIG to ai_flags as suggested by Aurelien Jarno + in response to Bernhard R. Link, thanks! 
(Closes: #505020) + * methods/rred.cc: + - rewrite to be able to handle even big patch files + - adopt optional mmap+iovec patch from Morten Hustveit + (Closes: #463354) which should speed up a bit. Thanks! + * apt-pkg/contrib/mmap.{cc,h}: + - extend it to have a growable flag - unused now but maybe... + * apt-pkg/pkgcache.h: + - use long instead of short for {Ver,Desc}File size, + patch from Víctor Manuel Jáquez Leal, thanks! (Closes: #538917) + * apt-pkg/acquire-item.cc: + - allow also to skip the last patch if target is reached, + thanks Bernhard R. Link! (Closes: #545699) + * methods/http{,s}.cc + - add config setting for User-Agent to the Acquire group, + thanks Timothy J. Miller! (Closes: #355782) + - add https options which default to http ones (Closes: #557085) + * ftparchive/writer.{cc,h}: + - add APT::FTPArchive::AlwaysStat to disable the too aggressive + caching if versions are build multiply times (not recommend) + Patch by Christoph Goehre, thanks! (Closes: #463260) + * ftparchive/*: + - fix a few typos in strings, comments and manpage, + thanks Karl Goetz! (Closes: #558757) + * debian/apt.cron.daily: + - check cache size even if we do nothing else otherwise, thanks + Francesco Poli for patch(s) and patience! (Closes: #459344) + + -- Michael Vogt <mvo@debian.org> Thu, 10 Dec 2009 22:02:38 +0100 + apt (0.7.25) UNRELEASED; urgency=low [ Christian Perrier ] diff --git a/doc/apt-ftparchive.1.xml b/doc/apt-ftparchive.1.xml index d47df957a..5d538c2c6 100644 --- a/doc/apt-ftparchive.1.xml +++ b/doc/apt-ftparchive.1.xml @@ -285,7 +285,7 @@ <varlistentry><term>Sources</term> <listitem><para> - Sets the output Packages file. Defaults to + Sets the output Sources file. Defaults to <filename>$(DIST)/$(SECTION)/source/Sources</filename></para></listitem> </varlistentry> @@ -544,6 +544,18 @@ for i in Sections do Configuration Item: <literal>APT::FTPArchive::ReadOnlyDB</literal>.</para></listitem> </varlistentry> + <varlistentry><term><option>APT::FTPArchive::AlwaysStat</option></term> + <listitem><para> + &apt-ftparchive; caches as much as possible of metadata in it is cachedb. If packages + are recompiled and/or republished with the same version again, this will lead to problems + as the now outdated cached metadata like size and checksums will be used. With this option + enabled this will no longer happen as it will be checked if the file was changed. + Note that this option is set to "<literal>false</literal>" by default as it is not recommend + to upload multiply versions/builds of a package with the same versionnumber, so in theory + nobody will have these problems and therefore all these extra checks are useless. + </para></listitem> + </varlistentry> + <varlistentry><term><option>APT::FTPArchive::LongDescription</option></term> <listitem><para> This configuration option defaults to "<literal>true</literal>" and should only be set to diff --git a/doc/apt.conf.5.xml b/doc/apt.conf.5.xml index e2db9defb..777630e14 100644 --- a/doc/apt.conf.5.xml +++ b/doc/apt.conf.5.xml @@ -142,7 +142,7 @@ DPkg::Pre-Install-Pkgs {"/usr/sbin/dpkg-preconfigure --apt";}; <listitem><para>Default release to install packages from if more than one version available. Contains release name, codename or release version. Examples: 'stable', 'testing', 'unstable', 'lenny', 'squeeze', '4.0', '5.0*'. 
See also &apt-preferences;.</para></listitem> </varlistentry> - + <varlistentry><term>Ignore-Hold</term> <listitem><para>Ignore Held packages; This global option causes the problem resolver to ignore held packages in its decision making.</para></listitem> @@ -275,13 +275,20 @@ DPkg::Pre-Install-Pkgs {"/usr/sbin/dpkg-preconfigure --apt";}; <para>The used bandwidth can be limited with <literal>Acquire::http::Dl-Limit</literal> which accepts integer values in kilobyte. The default value is 0 which deactivates the limit and tries uses as much as possible of the bandwidth (Note that this option implicit - deactivates the download from multiple servers at the same time.)</para></listitem> + deactivates the download from multiple servers at the same time.)</para> + + <para><literal>Acquire::http::User-Agent</literal> can be used to set a different + User-Agent for the http download method as some proxies allow access for clients + only if the client uses a known identifier.</para> + </listitem> </varlistentry> <varlistentry><term>https</term> - <listitem><para>HTTPS URIs. Cache-control and proxy options are the same as for - <literal>http</literal> method. - <literal>Pipeline-Depth</literal> option is not supported yet.</para> + <listitem><para>HTTPS URIs. Cache-control, Timeout, AllowRedirect, Dl-Limit and + proxy options are the same as for <literal>http</literal> method and will also + default to the options from the <literal>http</literal> method if they are not + explicitly set for https. <literal>Pipeline-Depth</literal> option is not + supported yet.</para> <para><literal>CaInfo</literal> suboption specifies place of file that holds info about trusted certificates. @@ -385,6 +392,27 @@ DPkg::Pre-Install-Pkgs {"/usr/sbin/dpkg-preconfigure --apt";}; these warnings are most of the time false negatives. Future versions will maybe include a way to really prefer uncompressed files to support the usage of local mirrors.</para></listitem> </varlistentry> + + <varlistentry><term>Languages</term> + <listitem><para>The Languages subsection controls which <filename>Translation</filename> files are downloaded + and in which order APT tries to display the Description-Translations. APT will try to display the first + available Description for the Language which is listed at first. Languages can be defined with their + short or long Languagecodes. Note that not all archives provide <filename>Translation</filename> + files for every Language - especially the long Languagecodes are rare, so please + inform you which ones are available before you set here impossible values.</para> + <para>The default list includes "environment" and "en". "<literal>environment</literal>" has a special meaning here: + It will be replaced at runtime with the languagecodes extracted from the <literal>LC_MESSAGES</literal> enviroment variable. + It will also ensure that these codes are not included twice in the list. If <literal>LC_MESSAGES</literal> + is set to "C" only the <filename>Translation-en</filename> file (if available) will be used. + To force apt to use no Translation file use the setting <literal>Acquire::Languages=none</literal>. "<literal>none</literal>" + is another special meaning code which will stop the search for a fitting <filename>Translation</filename> file. + This can be used by the system administrator to let APT know that it should download also this files without + actually use them if not the environment specifies this languages. 
So the following example configuration will + result in the order "en, de" in an english and in "de, en" in a german localization. Note that "fr" is downloaded, + but not used if APT is not used in a french localization, in such an environment the order would be "fr, de, en". + <programlisting>Acquire::Languages { "environment"; "de"; "en"; "none"; "fr"; };</programlisting></para></listitem> + </varlistentry> + </variablelist> </para> </refsect1> @@ -976,6 +1004,7 @@ is commented. </listitem> </varlistentry> --> + </variablelist> </refsect1> diff --git a/doc/examples/configure-index b/doc/examples/configure-index index f5f996460..0a20e8f2b 100644 --- a/doc/examples/configure-index +++ b/doc/examples/configure-index @@ -191,19 +191,37 @@ Acquire Max-Age "86400"; // 1 Day age on index files No-Store "false"; // Prevent the cache from storing archives Dl-Limit "7"; // 7Kb/sec maximum download rate + User-Agent "Debian APT-HTTP/1.3"; }; - // HTTPS method configuration: - // - uses the http proxy config - // - uses the http cache-control values - // - uses the http Dl-Limit values - https + + + // HTTPS method configuration: uses the http + // - proxy config + // - cache-control values + // - Dl-Limit, Timout, ... values + // if not set explicit for https + // + // see /usr/share/doc/apt/examples/apt-https-method-example.conf.gz + // for more examples + https { Verify-Peer "false"; SslCert "/etc/apt/some.pem"; - CaPath "/etc/ssl/certs"; - Verify-Host" "true"; - AllowRedirect "true"; + CaPath "/etc/ssl/certs"; + Verify-Host" "true"; + AllowRedirect "true"; + + Timeout "120"; + AllowRedirect "true"; + + // Cache Control. Note these do not work with Squid 2.0.2 + No-Cache "false"; + Max-Age "86400"; // 1 Day age on index files + No-Store "false"; // Prevent the cache from storing archives + Dl-Limit "7"; // 7Kb/sec maximum download rate + + User-Agent "Debian APT-CURL/1.0"; }; ftp @@ -255,6 +273,15 @@ Acquire Order { "gz"; "lzma"; "bz2"; }; }; + + Languages + { + "environment"; + "de"; + "en"; + "none"; + "fr"; + }; }; // Directory layout diff --git a/doc/po/es.po b/doc/po/es.po index ed7ca3d8f..65456e495 100644 --- a/doc/po/es.po +++ b/doc/po/es.po @@ -9867,6 +9867,10 @@ msgid "Which will use the already fetched archives on the disc." msgstr "" #, fuzzy +#~ msgid "<option>APT::FTPArchive::AlwaysStat</option>" +#~ msgstr "<option>--all-versions</option>" + +#, fuzzy #~ msgid "/usr/share/doc/apt/" #~ msgstr "/usr/share/doc/apt/" diff --git a/ftparchive/apt-ftparchive.cc b/ftparchive/apt-ftparchive.cc index d0dea7768..5b6b3940c 100644 --- a/ftparchive/apt-ftparchive.cc +++ b/ftparchive/apt-ftparchive.cc @@ -3,7 +3,7 @@ // $Id: apt-ftparchive.cc,v 1.8.2.3 2004/01/02 22:01:48 mdz Exp $ /* ###################################################################### - apt-scanpackages - Efficient work-alike for dpkg-scanpackages + apt-ftparchive - Efficient work-alike for dpkg-scanpackages Let contents be disabled from the conf @@ -792,7 +792,7 @@ bool Generate(CommandLine &CmdL) if (_config->FindB("APT::FTPArchive::Contents",true) == false) return true; - c1out << "Done Packages, Starting contents." << endl; + c1out << "Packages done, Starting contents." 
<< endl; // Sort the contents file list by date string ArchiveDir = Setup.FindDir("Dir::ArchiveDir"); diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc index dfda827b6..b04244347 100644 --- a/ftparchive/cachedb.cc +++ b/ftparchive/cachedb.cc @@ -26,7 +26,7 @@ // CacheDB::ReadyDB - Ready the DB2 /*{{{*/ // --------------------------------------------------------------------- /* This opens the DB2 file for caching package information */ -bool CacheDB::ReadyDB(string DB) +bool CacheDB::ReadyDB(string const &DB) { int err; @@ -69,7 +69,7 @@ bool CacheDB::ReadyDB(string DB) // apt 0.6.44 if (err == EINVAL) { - _error->Error(_("DB format is invalid. If you upgraded from a older version of apt, please remove and re-create the database.")); + _error->Error(_("DB format is invalid. If you upgraded from an older version of apt, please remove and re-create the database.")); } if (err) { @@ -83,7 +83,7 @@ bool CacheDB::ReadyDB(string DB) return true; } /*}}}*/ -// CacheDB::OpenFile - Open the filei /*{{{*/ +// CacheDB::OpenFile - Open the file /*{{{*/ // --------------------------------------------------------------------- /* */ bool CacheDB::OpenFile() @@ -102,9 +102,9 @@ bool CacheDB::OpenFile() // --------------------------------------------------------------------- /* This gets the size from the database if it's there. If we need * to look at the file, also get the mtime from the file. */ -bool CacheDB::GetFileStat() +bool CacheDB::GetFileStat(bool const &doStat) { - if ((CurStat.Flags & FlSize) == FlSize) + if ((CurStat.Flags & FlSize) == FlSize && doStat == false) { /* Already worked out the file size */ } @@ -139,7 +139,7 @@ bool CacheDB::GetCurStat() if (DBLoaded) { - /* First see if thre is anything about it + /* First see if there is anything about it in the database */ /* Get the flags (and mtime) */ @@ -160,9 +160,9 @@ bool CacheDB::GetCurStat() /*}}}*/ // CacheDB::GetFileInfo - Get all the info about the file /*{{{*/ // --------------------------------------------------------------------- -bool CacheDB::GetFileInfo(string FileName, bool DoControl, bool DoContents, - bool GenContentsOnly, - bool DoMD5, bool DoSHA1, bool DoSHA256) +bool CacheDB::GetFileInfo(string const &FileName, bool const &DoControl, bool const &DoContents, + bool const &GenContentsOnly, bool const &DoMD5, bool const &DoSHA1, + bool const &DoSHA256, bool const &checkMtime) { this->FileName = FileName; @@ -171,14 +171,18 @@ bool CacheDB::GetFileInfo(string FileName, bool DoControl, bool DoContents, return false; } OldStat = CurStat; - - if (GetFileStat() == false) + + if (GetFileStat(checkMtime) == false) { delete Fd; Fd = NULL; return false; } + /* if mtime changed, update CurStat from disk */ + if (checkMtime == true && OldStat.mtime != CurStat.mtime) + CurStat.Flags = FlSize; + Stats.Bytes += CurStat.FileSize; Stats.Packages++; @@ -247,7 +251,7 @@ bool CacheDB::LoadControl() // CacheDB::LoadContents - Load the File Listing /*{{{*/ // --------------------------------------------------------------------- /* */ -bool CacheDB::LoadContents(bool GenOnly) +bool CacheDB::LoadContents(bool const &GenOnly) { // Try to read the control information out of the DB. 
if ((CurStat.Flags & FlContents) == FlContents) @@ -297,7 +301,7 @@ static string bytes2hex(uint8_t *bytes, size_t length) { return string(space); } -static inline unsigned char xdig2num(char dig) { +static inline unsigned char xdig2num(char const &dig) { if (isdigit(dig)) return dig - '0'; if ('a' <= dig && dig <= 'f') return dig - 'a' + 10; if ('A' <= dig && dig <= 'F') return dig - 'A' + 10; @@ -318,7 +322,7 @@ static void hex2bytes(uint8_t *bytes, const char *hex, int length) { // CacheDB::GetMD5 - Get the MD5 hash /*{{{*/ // --------------------------------------------------------------------- /* */ -bool CacheDB::GetMD5(bool GenOnly) +bool CacheDB::GetMD5(bool const &GenOnly) { // Try to read the control information out of the DB. if ((CurStat.Flags & FlMD5) == FlMD5) @@ -349,7 +353,7 @@ bool CacheDB::GetMD5(bool GenOnly) // CacheDB::GetSHA1 - Get the SHA1 hash /*{{{*/ // --------------------------------------------------------------------- /* */ -bool CacheDB::GetSHA1(bool GenOnly) +bool CacheDB::GetSHA1(bool const &GenOnly) { // Try to read the control information out of the DB. if ((CurStat.Flags & FlSHA1) == FlSHA1) @@ -380,7 +384,7 @@ bool CacheDB::GetSHA1(bool GenOnly) // CacheDB::GetSHA256 - Get the SHA256 hash /*{{{*/ // --------------------------------------------------------------------- /* */ -bool CacheDB::GetSHA256(bool GenOnly) +bool CacheDB::GetSHA256(bool const &GenOnly) { // Try to read the control information out of the DB. if ((CurStat.Flags & FlSHA256) == FlSHA256) diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h index c10f41ecc..0ba80909a 100644 --- a/ftparchive/cachedb.h +++ b/ftparchive/cachedb.h @@ -49,7 +49,7 @@ class CacheDB { return Dbp->get(Dbp,0,&Key,&Data,0) == 0; }; - inline bool Put(const void *In,unsigned long Length) + inline bool Put(const void *In,unsigned long const &Length) { if (ReadOnly == true) return true; @@ -63,13 +63,13 @@ class CacheDB return true; } bool OpenFile(); - bool GetFileStat(); + bool GetFileStat(bool const &doStat = false); bool GetCurStat(); bool LoadControl(); - bool LoadContents(bool GenOnly); - bool GetMD5(bool GenOnly); - bool GetSHA1(bool GenOnly); - bool GetSHA256(bool GenOnly); + bool LoadContents(bool const &GenOnly); + bool GetMD5(bool const &GenOnly); + bool GetSHA1(bool const &GenOnly); + bool GetSHA256(bool const &GenOnly); // Stat info stored in the DB, Fixed types since it is written to disk. 
enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2), @@ -117,20 +117,20 @@ class CacheDB Stats() : Bytes(0), MD5Bytes(0), SHA1Bytes(0), SHA256Bytes(0), Packages(0), Misses(0), DeLinkBytes(0) {}; } Stats; - bool ReadyDB(string DB); + bool ReadyDB(string const &DB); inline bool DBFailed() {return Dbp != 0 && DBLoaded == false;}; inline bool Loaded() {return DBLoaded == true;}; inline off_t GetFileSize(void) {return CurStat.FileSize;} - bool SetFile(string FileName,struct stat St,FileFd *Fd); - bool GetFileInfo(string FileName, bool DoControl, bool DoContents, - bool GenContentsOnly, bool DoMD5, bool DoSHA1, bool DoSHA256); + bool SetFile(string const &FileName,struct stat St,FileFd *Fd); + bool GetFileInfo(string const &FileName, bool const &DoControl, bool const &DoContents, bool const &GenContentsOnly, + bool const &DoMD5, bool const &DoSHA1, bool const &DoSHA256, bool const &checkMtime = false); bool Finish(); bool Clean(); - CacheDB(string DB) : Dbp(0), Fd(NULL), DebFile(0) {ReadyDB(DB);}; + CacheDB(string const &DB) : Dbp(0), Fd(NULL), DebFile(0) {ReadyDB(DB);}; ~CacheDB() {ReadyDB(string()); delete DebFile;}; }; diff --git a/ftparchive/contents.cc b/ftparchive/contents.cc index 1f2cbcc3d..b761d9204 100644 --- a/ftparchive/contents.cc +++ b/ftparchive/contents.cc @@ -13,7 +13,7 @@ removing the massive sort time overhead. By breaking all the pathnames into components and storing them - separately a space savings is realized by not duplicating the string + separately a space saving is realized by not duplicating the string over and over again. Ultimately this saving is sacrificed to storage of the tree structure itself but the tree structure yields a speed gain in the sorting and processing. Ultimately it takes about 5 seconds to @@ -399,7 +399,7 @@ bool ContentsExtract::TakeContents(const void *NewData,unsigned long Length) // ContentsExtract::Add - Read the contents data into the sorter /*{{{*/ // --------------------------------------------------------------------- /* */ -void ContentsExtract::Add(GenContents &Contents,string Package) +void ContentsExtract::Add(GenContents &Contents,string const &Package) { const char *Start = Data; char *Pkg = Contents.Mystrdup(Package.c_str()); diff --git a/ftparchive/contents.h b/ftparchive/contents.h index d8457cd45..5b5092b66 100644 --- a/ftparchive/contents.h +++ b/ftparchive/contents.h @@ -80,7 +80,7 @@ class ContentsExtract : public pkgDirStream virtual bool DoItem(Item &Itm,int &Fd); void Reset() {CurSize = 0;}; bool TakeContents(const void *Data,unsigned long Length); - void Add(GenContents &Contents,string Package); + void Add(GenContents &Contents,string const &Package); ContentsExtract() : Data(0), MaxSize(0), CurSize(0) {}; virtual ~ContentsExtract() {delete [] Data;}; diff --git a/ftparchive/multicompress.cc b/ftparchive/multicompress.cc index 2fc8efcbf..bb4beedf9 100644 --- a/ftparchive/multicompress.cc +++ b/ftparchive/multicompress.cc @@ -40,14 +40,14 @@ const MultiCompress::CompType MultiCompress::Compressors[] = // MultiCompress::MultiCompress - Constructor /*{{{*/ // --------------------------------------------------------------------- /* Setup the file outputs, compression modes and fork the writer child */ -MultiCompress::MultiCompress(string Output,string Compress, - mode_t Permissions,bool Write) +MultiCompress::MultiCompress(string const &Output,string const &Compress, + mode_t const &Permissions,bool const &Write) : + Permissions(Permissions) { Outputs = 0; Outputter = -1; Input = 0; UpdateMTime = 0; - 
this->Permissions = Permissions; /* Parse the compression string, a space separated lists of compresison types */ @@ -126,7 +126,7 @@ MultiCompress::~MultiCompress() /* This checks each compressed file to make sure it exists and returns stat information for a random file from the collection. False means one or more of the files is missing. */ -bool MultiCompress::GetStat(string Output,string Compress,struct stat &St) +bool MultiCompress::GetStat(string const &Output,string const &Compress,struct stat &St) { /* Parse the compression string, a space separated lists of compresison types */ @@ -268,8 +268,8 @@ bool MultiCompress::Finalize(unsigned long &OutSize) /* This opens the compressor, either in compress mode or decompress mode. FileFd is always the compressor input/output file, OutFd is the created pipe, Input for Compress, Output for Decompress. */ -bool MultiCompress::OpenCompress(const CompType *Prog,pid_t &Pid,int FileFd, - int &OutFd,bool Comp) +bool MultiCompress::OpenCompress(const CompType *Prog,pid_t &Pid,int const &FileFd, + int &OutFd,bool const &Comp) { Pid = -1; @@ -365,11 +365,11 @@ bool MultiCompress::CloseOld(int Fd,pid_t Proc) // MultiCompress::Child - The writer child /*{{{*/ // --------------------------------------------------------------------- /* The child process forks a bunch of compression children and takes - input on FD and passes it to all the compressor childer. On the way it + input on FD and passes it to all the compressor children. On the way it computes the MD5 of the raw data. After this the raw data in the original files is compared to see if this data is new. If the data is new then the temp files are renamed, otherwise they are erased. */ -bool MultiCompress::Child(int FD) +bool MultiCompress::Child(int const &FD) { // Start the compression children.
for (Files *I = Outputs; I != 0; I = I->Next) diff --git a/ftparchive/multicompress.h b/ftparchive/multicompress.h index a65077e73..3ac3b8fb2 100644 --- a/ftparchive/multicompress.h +++ b/ftparchive/multicompress.h @@ -53,9 +53,9 @@ class MultiCompress mode_t Permissions; static const CompType Compressors[]; - bool OpenCompress(const CompType *Prog,pid_t &Pid,int FileFd, - int &OutFd,bool Comp); - bool Child(int Fd); + bool OpenCompress(const CompType *Prog,pid_t &Pid,int const &FileFd, + int &OutFd,bool const &Comp); + bool Child(int const &Fd); bool Start(); bool Die(); @@ -68,10 +68,10 @@ class MultiCompress bool Finalize(unsigned long &OutSize); bool OpenOld(int &Fd,pid_t &Proc); bool CloseOld(int Fd,pid_t Proc); - static bool GetStat(string Output,string Compress,struct stat &St); + static bool GetStat(string const &Output,string const &Compress,struct stat &St); - MultiCompress(string Output,string Compress,mode_t Permissions, - bool Write = true); + MultiCompress(string const &Output,string const &Compress, + mode_t const &Permissions, bool const &Write = true); ~MultiCompress(); }; diff --git a/ftparchive/override.cc b/ftparchive/override.cc index 6f40bc865..3cf10b89b 100644 --- a/ftparchive/override.cc +++ b/ftparchive/override.cc @@ -24,7 +24,7 @@ // Override::ReadOverride - Read the override file /*{{{*/ // --------------------------------------------------------------------- /* This parses the override file and reads it into the map */ -bool Override::ReadOverride(string File,bool Source) +bool Override::ReadOverride(string const &File,bool const &Source) { if (File.empty() == true) return true; @@ -132,7 +132,7 @@ bool Override::ReadOverride(string File,bool Source) // Override::ReadExtraOverride - Read the extra override file /*{{{*/ // --------------------------------------------------------------------- /* This parses the extra override file and reads it into the map */ -bool Override::ReadExtraOverride(string File,bool Source) +bool Override::ReadExtraOverride(string const &File,bool const &Source) { if (File.empty() == true) return true; @@ -209,9 +209,9 @@ bool Override::ReadExtraOverride(string File,bool Source) /* Returns a override item for the given package and the given architecture. * Treats "all" special */ -Override::Item* Override::GetItem(string Package, string Architecture) +Override::Item* Override::GetItem(string const &Package, string const &Architecture) { - map<string,Item>::iterator I = Mapping.find(Package); + map<string,Item>::const_iterator I = Mapping.find(Package); map<string,Item>::iterator J = Mapping.find(Package + "/" + Architecture); if (I == Mapping.end() && J == Mapping.end()) @@ -230,7 +230,7 @@ Override::Item* Override::GetItem(string Package, string Architecture) if (R->Priority != "") result->Priority = R->Priority; if (R->OldMaint != "") result->OldMaint = R->OldMaint; if (R->NewMaint != "") result->NewMaint = R->NewMaint; - for (map<string,string>::iterator foI = R->FieldOverride.begin(); + for (map<string,string>::const_iterator foI = R->FieldOverride.begin(); foI != R->FieldOverride.end(); foI++) { result->FieldOverride[foI->first] = foI->second; @@ -247,7 +247,7 @@ Override::Item* Override::GetItem(string Package, string Architecture) there is a rule but it does not match then the empty string is returned, also if there was no rewrite rule the empty string is returned. Failed indicates if there was some kind of problem while rewriting. 
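For context, the rewrite this comment describes is driven by the binary override file, whose optional last column requests a maintainer swap. A minimal sketch with purely hypothetical package and maintainer names:

    somepkg  optional  utils  Old Maintainer <old@example.org> => New Team <new@example.org>

If the package's Maintainer field matches the left-hand side, SwapMaint substitutes the right-hand side; several old maintainers may be listed separated by " // ", which is what the double-slash splitting below handles.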
*/ -string Override::Item::SwapMaint(string Orig,bool &Failed) +string Override::Item::SwapMaint(string const &Orig,bool &Failed) { Failed = false; @@ -262,10 +262,10 @@ string Override::Item::SwapMaint(string Orig,bool &Failed) override file. Thus it persists.*/ #if 1 // Break OldMaint up into little bits on double slash boundaries. - string::iterator End = OldMaint.begin(); + string::const_iterator End = OldMaint.begin(); while (1) { - string::iterator Start = End; + string::const_iterator Start = End; for (; End < OldMaint.end() && (End + 3 >= OldMaint.end() || End[0] != ' ' || End[1] != '/' || End[2] != '/'); End++); diff --git a/ftparchive/override.h b/ftparchive/override.h index f270556eb..c5cacc2b4 100644 --- a/ftparchive/override.h +++ b/ftparchive/override.h @@ -31,20 +31,20 @@ class Override string NewMaint; map<string,string> FieldOverride; - string SwapMaint(string Orig,bool &Failed); + string SwapMaint(string const &Orig,bool &Failed); ~Item() {}; }; map<string,Item> Mapping; - inline Item *GetItem(string Package) + inline Item *GetItem(string const &Package) { return GetItem(Package, ""); } - Item *GetItem(string Package, string Architecture); + Item *GetItem(string const &Package, string const &Architecture); - bool ReadOverride(string File,bool Source = false); - bool ReadExtraOverride(string File,bool Source = false); + bool ReadOverride(string const &File,bool const &Source = false); + bool ReadExtraOverride(string const &File,bool const &Source = false); }; #endif diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc index b2ebdca8a..5547c6aa5 100644 --- a/ftparchive/writer.cc +++ b/ftparchive/writer.cc @@ -89,7 +89,7 @@ int FTWScanner::ScannerFTW(const char *File,const struct stat *sb,int Flag) // FTWScanner::ScannerFile - File Scanner /*{{{*/ // --------------------------------------------------------------------- /* */ -int FTWScanner::ScannerFile(const char *File, bool ReadLink) +int FTWScanner::ScannerFile(const char *File, bool const &ReadLink) { const char *LastComponent = strrchr(File, '/'); if (LastComponent == NULL) @@ -97,7 +97,7 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink) else LastComponent++; - vector<string>::iterator I; + vector<string>::const_iterator I; for(I = Owner->Patterns.begin(); I != Owner->Patterns.end(); ++I) { if (fnmatch((*I).c_str(), LastComponent, 0) == 0) @@ -127,7 +127,7 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink) { Owner->NewLine(1); - bool Type = _error->PopMessage(Err); + bool const Type = _error->PopMessage(Err); if (Type == true) cerr << _("E: ") << Err << endl; else @@ -148,7 +148,7 @@ int FTWScanner::ScannerFile(const char *File, bool ReadLink) // FTWScanner::RecursiveScan - Just scan a directory tree /*{{{*/ // --------------------------------------------------------------------- /* */ -bool FTWScanner::RecursiveScan(string Dir) +bool FTWScanner::RecursiveScan(string const &Dir) { /* If noprefix is set then jam the scan root in, so we don't generate link followed paths out of control */ @@ -161,7 +161,7 @@ bool FTWScanner::RecursiveScan(string Dir) // Do recursive directory searching Owner = this; - int Res = ftw(Dir.c_str(),ScannerFTW,30); + int const Res = ftw(Dir.c_str(),ScannerFTW,30); // Error treewalking? if (Res != 0) @@ -178,7 +178,7 @@ bool FTWScanner::RecursiveScan(string Dir) // --------------------------------------------------------------------- /* This is an alternative to using FTW to locate files, it reads the list of files from another file. 
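This list-driven mode is what the FileList and SourceFileList directives of apt-ftparchive's generate configuration use instead of a directory walk. A rough sketch of such a configuration, with made-up paths, might look like:

    Dir { ArchiveDir "/srv/archive"; };
    Tree "dists/unstable"
    {
       Sections "main";
       Architectures "amd64 source";
       FileList "lists/unstable_$(SECTION)_$(ARCH).list";
    };

Each line of the list names one archive file (relative names are taken relative to the archive directory), and LoadFileList hands them to DoPackage one by one.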
*/ -bool FTWScanner::LoadFileList(string Dir,string File) +bool FTWScanner::LoadFileList(string const &Dir, string const &File) { /* If noprefix is set then jam the scan root in, so we don't generate link followed paths out of control */ @@ -236,7 +236,7 @@ bool FTWScanner::LoadFileList(string Dir,string File) /* */ bool FTWScanner::Delink(string &FileName,const char *OriginalPath, unsigned long &DeLinkBytes, - off_t FileSize) + off_t const &FileSize) { // See if this isn't an internaly prefix'd file name. if (InternalPrefix.empty() == false && @@ -293,8 +293,8 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath, // PackagesWriter::PackagesWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides, - string aArch) : +PackagesWriter::PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides, + string const &aArch) : Db(DB),Stats(Db.Stats), Arch(aArch) { Output = stdout; @@ -306,6 +306,7 @@ PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides, DoMD5 = _config->FindB("APT::FTPArchive::MD5",true); DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true); DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true); + DoAlwaysStat = _config->FindB("APT::FTPArchive::AlwaysStat", false); DoContents = _config->FindB("APT::FTPArchive::Contents",true); NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false); LongDescription = _config->FindB("APT::FTPArchive::LongDescription",true); @@ -328,7 +329,7 @@ PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides, // FTWScanner::SetExts - Set extensions to support /*{{{*/ // --------------------------------------------------------------------- /* */ -bool FTWScanner::SetExts(string Vals) +bool FTWScanner::SetExts(string const &Vals) { ClearPatterns(); string::size_type Start = 0; @@ -360,7 +361,7 @@ bool FTWScanner::SetExts(string Vals) bool PackagesWriter::DoPackage(string FileName) { // Pull all the data we need form the DB - if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256) + if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256, DoAlwaysStat) == false) { return false; @@ -463,7 +464,7 @@ bool PackagesWriter::DoPackage(string FileName) SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str()); /* Get rid of the Optional tag. This is an ugly, ugly, ugly hack that - dpkg-scanpackages does.. Well sort of. dpkg-scanpackages just does renaming + dpkg-scanpackages does. Well sort of. dpkg-scanpackages just does renaming but dpkg does this append bit. So we do the append bit, at least that way the status file and package file will remain similar. There are other transforms but optional is the only legacy one still in use for some lazy reason. 
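To make the Optional transform concrete: for a hypothetical package whose control data still carried the legacy field, the generated Packages stanza would roughly change from

    Suggests: libbar
    Optional: libfoo-doc

to

    Suggests: libbar, libfoo-doc

with the legacy field folded into Suggests rather than dropped, mirroring the append behaviour the comment attributes to dpkg.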
*/ @@ -475,7 +476,7 @@ bool PackagesWriter::DoPackage(string FileName) SetTFRewriteData(Changes[End++], "Suggests", OptionalStr.c_str()); } - for (map<string,string>::iterator I = OverItem->FieldOverride.begin(); + for (map<string,string>::const_iterator I = OverItem->FieldOverride.begin(); I != OverItem->FieldOverride.end(); I++) SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str()); @@ -493,8 +494,8 @@ bool PackagesWriter::DoPackage(string FileName) // SourcesWriter::SourcesWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -SourcesWriter::SourcesWriter(string BOverrides,string SOverrides, - string ExtOverrides) +SourcesWriter::SourcesWriter(string const &BOverrides,string const &SOverrides, + string const &ExtOverrides) { Output = stdout; AddPattern("*.dsc"); @@ -719,7 +720,7 @@ bool SourcesWriter::DoPackage(string FileName) if (NewMaint.empty() == false) SetTFRewriteData(Changes[End++], "Maintainer", NewMaint.c_str()); - for (map<string,string>::iterator I = SOverItem->FieldOverride.begin(); + for (map<string,string>::const_iterator I = SOverItem->FieldOverride.begin(); I != SOverItem->FieldOverride.end(); I++) SetTFRewriteData(Changes[End++],I->first.c_str(),I->second.c_str()); @@ -739,7 +740,7 @@ bool SourcesWriter::DoPackage(string FileName) // ContentsWriter::ContentsWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -ContentsWriter::ContentsWriter(string DB) : +ContentsWriter::ContentsWriter(string const &DB) : Db(DB), Stats(Db.Stats) { @@ -751,9 +752,9 @@ ContentsWriter::ContentsWriter(string DB) : // --------------------------------------------------------------------- /* If Package is the empty string the control record will be parsed to determine what the package name is. 
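The empty-Package case is the one exercised by the standalone contents command, where no Packages stanza supplies the name and it must be read from the control member, e.g. (directory name is illustrative only):

    apt-ftparchive contents ./pool/main > Contents-amd64

ReadFromPkgs further down, by contrast, already knows each package name from the Packages file and passes it in, so the control parse is skipped.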
*/ -bool ContentsWriter::DoPackage(string FileName,string Package) +bool ContentsWriter::DoPackage(string FileName, string Package) { - if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false)) + if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false, false)) { return false; } @@ -772,7 +773,7 @@ bool ContentsWriter::DoPackage(string FileName,string Package) // ContentsWriter::ReadFromPkgs - Read from a packages file /*{{{*/ // --------------------------------------------------------------------- /* */ -bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress) +bool ContentsWriter::ReadFromPkgs(string const &PkgFile,string const &PkgCompress) { MultiCompress Pkgs(PkgFile,PkgCompress,0,false); if (_error->PendingError() == true) @@ -827,7 +828,7 @@ bool ContentsWriter::ReadFromPkgs(string PkgFile,string PkgCompress) // ReleaseWriter::ReleaseWriter - Constructor /*{{{*/ // --------------------------------------------------------------------- /* */ -ReleaseWriter::ReleaseWriter(string DB) +ReleaseWriter::ReleaseWriter(string const &DB) { AddPattern("Packages"); AddPattern("Packages.gz"); @@ -841,7 +842,7 @@ ReleaseWriter::ReleaseWriter(string DB) AddPattern("md5sum.txt"); Output = stdout; - time_t now = time(NULL); + time_t const now = time(NULL); char datestr[128]; if (strftime(datestr, sizeof(datestr), "%a, %d %b %Y %H:%M:%S UTC", gmtime(&now)) == 0) @@ -928,7 +929,7 @@ bool ReleaseWriter::DoPackage(string FileName) void ReleaseWriter::Finish() { fprintf(Output, "MD5Sum:\n"); - for(map<string,struct CheckSum>::iterator I = CheckSums.begin(); + for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin(); I != CheckSums.end(); ++I) { @@ -939,7 +940,7 @@ void ReleaseWriter::Finish() } fprintf(Output, "SHA1:\n"); - for(map<string,struct CheckSum>::iterator I = CheckSums.begin(); + for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin(); I != CheckSums.end(); ++I) { @@ -950,7 +951,7 @@ void ReleaseWriter::Finish() } fprintf(Output, "SHA256:\n"); - for(map<string,struct CheckSum>::iterator I = CheckSums.begin(); + for(map<string,struct CheckSum>::const_iterator I = CheckSums.begin(); I != CheckSums.end(); ++I) { diff --git a/ftparchive/writer.h b/ftparchive/writer.h index e76438900..ad58dee0a 100644 --- a/ftparchive/writer.h +++ b/ftparchive/writer.h @@ -43,12 +43,12 @@ class FTWScanner static FTWScanner *Owner; static int ScannerFTW(const char *File,const struct stat *sb,int Flag); - static int ScannerFile(const char *File, bool ReadLink); + static int ScannerFile(const char *File, bool const &ReadLink); bool Delink(string &FileName,const char *OriginalPath, - unsigned long &Bytes,off_t FileSize); + unsigned long &Bytes,off_t const &FileSize); - inline void NewLine(unsigned Priority) + inline void NewLine(unsigned const &Priority) { if (ErrorPrinted == false && Quiet <= Priority) { @@ -63,11 +63,11 @@ class FTWScanner string InternalPrefix; virtual bool DoPackage(string FileName) = 0; - bool RecursiveScan(string Dir); - bool LoadFileList(string BaseDir,string File); + bool RecursiveScan(string const &Dir); + bool LoadFileList(string const &BaseDir,string const &File); void ClearPatterns() { Patterns.clear(); }; - void AddPattern(string Pattern) { Patterns.push_back(Pattern); }; - bool SetExts(string Vals); + void AddPattern(string const &Pattern) { Patterns.push_back(Pattern); }; + bool SetExts(string const &Vals); FTWScanner(); virtual ~FTWScanner() {delete [] RealPath;}; @@ -84,6 +84,7 @@ class PackagesWriter : 
public FTWScanner bool DoMD5; bool DoSHA1; bool DoSHA256; + bool DoAlwaysStat; bool NoOverride; bool DoContents; bool LongDescription; @@ -95,13 +96,13 @@ class PackagesWriter : public FTWScanner struct CacheDB::Stats &Stats; string Arch; - inline bool ReadOverride(string File) {return Over.ReadOverride(File);}; - inline bool ReadExtraOverride(string File) + inline bool ReadOverride(string const &File) {return Over.ReadOverride(File);}; + inline bool ReadExtraOverride(string const &File) {return Over.ReadExtraOverride(File);}; virtual bool DoPackage(string FileName); - PackagesWriter(string DB,string Overrides,string ExtOverrides=string(), - string Arch=string()); + PackagesWriter(string const &DB,string const &Overrides,string const &ExtOverrides=string(), + string const &Arch=string()); virtual ~PackagesWriter() {}; }; @@ -121,12 +122,12 @@ class ContentsWriter : public FTWScanner bool DoPackage(string FileName,string Package); virtual bool DoPackage(string FileName) {return DoPackage(FileName,string());}; - bool ReadFromPkgs(string PkgFile,string PkgCompress); + bool ReadFromPkgs(string const &PkgFile,string const &PkgCompress); void Finish() {Gen.Print(Output);}; - inline bool ReadyDB(string DB) {return Db.ReadyDB(DB);}; + inline bool ReadyDB(string const &DB) {return Db.ReadyDB(DB);}; - ContentsWriter(string DB); + ContentsWriter(string const &DB); virtual ~ContentsWriter() {}; }; @@ -149,15 +150,15 @@ class SourcesWriter : public FTWScanner virtual bool DoPackage(string FileName); - SourcesWriter(string BOverrides,string SOverrides, - string ExtOverrides=string()); + SourcesWriter(string const &BOverrides,string const &SOverrides, + string const &ExtOverrides=string()); virtual ~SourcesWriter() {free(Buffer);}; }; class ReleaseWriter : public FTWScanner { public: - ReleaseWriter(string DB); + ReleaseWriter(string const &DB); virtual bool DoPackage(string FileName); void Finish(); diff --git a/methods/connect.cc b/methods/connect.cc index 74e670ebd..adb16a199 100644 --- a/methods/connect.cc +++ b/methods/connect.cc @@ -158,6 +158,7 @@ bool Connect(string Host,int Port,const char *Service,int DefPort,int &Fd, struct addrinfo Hints; memset(&Hints,0,sizeof(Hints)); Hints.ai_socktype = SOCK_STREAM; + Hints.ai_flags = AI_ADDRCONFIG; Hints.ai_protocol = 0; // if we couldn't resolve the host before, we don't try now diff --git a/methods/http.cc b/methods/http.cc index 3b210f6b6..2dae87a02 100644 --- a/methods/http.cc +++ b/methods/http.cc @@ -731,7 +731,8 @@ void HttpMethod::SendReq(FetchItem *Itm,CircleBuf &Out) Req += string("Authorization: Basic ") + Base64Encode(Uri.User + ":" + Uri.Password) + "\r\n"; } - Req += "User-Agent: Debian APT-HTTP/1.3 ("VERSION")\r\n\r\n"; + Req += "User-Agent: " + _config->Find("Acquire::http::User-Agent", + "Debian APT-HTTP/1.3 ("VERSION")") + "\r\n\r\n"; if (Debug == true) cerr << Req << endl; diff --git a/methods/https.cc b/methods/https.cc index 86d7f3a6b..726f53c44 100644 --- a/methods/https.cc +++ b/methods/https.cc @@ -1,4 +1,4 @@ -// -*- mode: cpp; mode: fold -*- +//-*- mode: cpp; mode: fold -*- // Description /*{{{*/ // $Id: http.cc,v 1.59 2004/05/08 19:42:35 mdz Exp $ /* ###################################################################### @@ -57,54 +57,38 @@ HttpsMethod::progress_callback(void *clientp, double dltotal, double dlnow, return 0; } -void HttpsMethod::SetupProxy() -{ - URI ServerName = Queue->Uri; - - // Determine the proxy setting - string SpecificProxy = _config->Find("Acquire::http::Proxy::" + ServerName.Host); - if 
(!SpecificProxy.empty()) - { - if (SpecificProxy == "DIRECT") - Proxy = ""; - else - Proxy = SpecificProxy; - } - else - { - string DefProxy = _config->Find("Acquire::http::Proxy"); - if (!DefProxy.empty()) - { - Proxy = DefProxy; - } - else - { - char* result = getenv("http_proxy"); - Proxy = result ? result : ""; - } - } - - // Parse no_proxy, a , separated list of domains - if (getenv("no_proxy") != 0) - { - if (CheckDomainList(ServerName.Host,getenv("no_proxy")) == true) - Proxy = ""; - } - - // Determine what host and port to use based on the proxy settings - string Host; - if (Proxy.empty() == true || Proxy.Host.empty() == true) - { - } - else - { - if (Proxy.Port != 0) - curl_easy_setopt(curl, CURLOPT_PROXYPORT, Proxy.Port); - curl_easy_setopt(curl, CURLOPT_PROXY, Proxy.Host.c_str()); - } -} - - +void HttpsMethod::SetupProxy() { /*{{{*/ + URI ServerName = Queue->Uri; + + // Determine the proxy setting - try https first, fallback to http and use env at last + string UseProxy = _config->Find("Acquire::https::Proxy::" + ServerName.Host, + _config->Find("Acquire::http::Proxy::" + ServerName.Host)); + + if (UseProxy.empty() == true) + UseProxy = _config->Find("Acquire::https::Proxy", _config->Find("Acquire::http::Proxy")); + + // User want to use NO proxy, so nothing to setup + if (UseProxy == "DIRECT") + return; + + if (UseProxy.empty() == false) { + // Parse no_proxy, a comma (,) separated list of domains we don't want to use + // a proxy for so we stop right here if it is in the list + if (getenv("no_proxy") != 0 && CheckDomainList(ServerName.Host,getenv("no_proxy")) == true) + return; + } else { + const char* result = getenv("http_proxy"); + UseProxy = result == NULL ? "" : result; + } + + // Determine what host and port to use based on the proxy settings + if (UseProxy.empty() == false) { + Proxy = UseProxy; + if (Proxy.Port != 1) + curl_easy_setopt(curl, CURLOPT_PROXYPORT, Proxy.Port); + curl_easy_setopt(curl, CURLOPT_PROXY, Proxy.Host.c_str()); + } +} /*}}}*/ // HttpsMethod::Fetch - Fetch an item /*{{{*/ // --------------------------------------------------------------------- /* This adds an item to the pipeline. 
We keep the pipeline at a fixed @@ -191,12 +175,15 @@ bool HttpsMethod::Fetch(FetchItem *Itm) curl_easy_setopt(curl, CURLOPT_SSLVERSION, final_version); // cache-control - if(_config->FindB("Acquire::http::No-Cache",false) == false) + if(_config->FindB("Acquire::https::No-Cache", + _config->FindB("Acquire::http::No-Cache",false)) == false) { // cache enabled - if (_config->FindB("Acquire::http::No-Store",false) == true) + if (_config->FindB("Acquire::https::No-Store", + _config->FindB("Acquire::http::No-Store",false)) == true) headers = curl_slist_append(headers,"Cache-Control: no-store"); - ioprintf(ss, "Cache-Control: max-age=%u", _config->FindI("Acquire::http::Max-Age",0)); + ioprintf(ss, "Cache-Control: max-age=%u", _config->FindI("Acquire::https::Max-Age", + _config->FindI("Acquire::http::Max-Age",0))); headers = curl_slist_append(headers, ss.str().c_str()); } else { // cache disabled by user @@ -206,22 +193,29 @@ bool HttpsMethod::Fetch(FetchItem *Itm) curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers); // speed limit - int dlLimit = _config->FindI("Acquire::http::Dl-Limit",0)*1024; + int dlLimit = _config->FindI("Acquire::https::Dl-Limit", + _config->FindI("Acquire::http::Dl-Limit",0))*1024; if (dlLimit > 0) curl_easy_setopt(curl, CURLOPT_MAX_RECV_SPEED_LARGE, dlLimit); // set header - curl_easy_setopt(curl, CURLOPT_USERAGENT,"Debian APT-CURL/1.0 ("VERSION")"); + curl_easy_setopt(curl, CURLOPT_USERAGENT, + _config->Find("Acquire::https::User-Agent", + _config->Find("Acquire::http::User-Agent", + "Debian APT-CURL/1.0 ("VERSION")")).c_str()); // set timeout - int timeout = _config->FindI("Acquire::http::Timeout",120); + int timeout = _config->FindI("Acquire::https::Timeout", + _config->FindI("Acquire::http::Timeout",120)); + curl_easy_setopt(curl, CURLOPT_TIMEOUT, timeout); curl_easy_setopt(curl, CURLOPT_CONNECTTIMEOUT, timeout); //set really low lowspeed timeout (see #497983) curl_easy_setopt(curl, CURLOPT_LOW_SPEED_LIMIT, DL_MIN_SPEED); curl_easy_setopt(curl, CURLOPT_LOW_SPEED_TIME, timeout); // set redirect options and default to 10 redirects - bool AllowRedirect = _config->FindI("Acquire::https::AllowRedirect", true); + bool AllowRedirect = _config->FindB("Acquire::https::AllowRedirect", + _config->FindB("Acquire::http::AllowRedirect",true)); curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, AllowRedirect); curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 10); diff --git a/methods/rred.cc b/methods/rred.cc index 27d95bdde..262c78cab 100644 --- a/methods/rred.cc +++ b/methods/rred.cc @@ -1,202 +1,443 @@ +// Includes /*{{{*/ #include <apt-pkg/fileutl.h> +#include <apt-pkg/mmap.h> #include <apt-pkg/error.h> #include <apt-pkg/acquire-method.h> #include <apt-pkg/strutl.h> #include <apt-pkg/hashes.h> #include <sys/stat.h> +#include <sys/uio.h> #include <unistd.h> #include <utime.h> #include <stdio.h> #include <errno.h> #include <apti18n.h> - -/* this method implements a patch functionality similar to "patch --ed" that is - * used by the "tiffany" incremental packages download stuff. it differs from - * "ed" insofar that it is way more restricted (and therefore secure). in the - * moment only the "c", "a" and "d" commands of ed are implemented (diff - * doesn't output any other). additionally the records must be reverse sorted - * by line number and may not overlap (diff *seems* to produce this kind of - * output). 
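A concrete, made-up example of the restricted ed dialect described here, with the commands reverse-sorted by line number and each a/c block closed by a lone dot:

    12,13d
    7c
    replacement text for line 7
    .
    2a
    extra line inserted after line 2
    .

Applied to a base file this deletes lines 12 and 13, rewrites line 7 and inserts one line after line 2; the reverse ordering is what allows the single tail-recursive pass implemented below.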
+ /*}}}*/ +/** \brief RredMethod - ed-style incremential patch method {{{ + * + * This method implements a patch functionality similar to "patch --ed" that is + * used by the "tiffany" incremental packages download stuff. It differs from + * "ed" insofar that it is way more restricted (and therefore secure). + * The currently supported ed commands are "<em>c</em>hange", "<em>a</em>dd" and + * "<em>d</em>elete" (diff doesn't output any other). + * Additionally the records must be reverse sorted by line number and + * may not overlap (diff *seems* to produce this kind of output). * */ +class RredMethod : public pkgAcqMethod { + bool Debug; + // the size of this doesn't really matter (except for performance) + const static int BUF_SIZE = 1024; + // the supported ed commands + enum Mode {MODE_CHANGED='c', MODE_DELETED='d', MODE_ADDED='a'}; + // return values + enum State {ED_OK, ED_ORDERING, ED_PARSER, ED_FAILURE, MMAP_FAILED}; -const char *Prog; + State applyFile(FILE *ed_cmds, FILE *in_file, FILE *out_file, + unsigned long &line, char *buffer, Hashes *hash) const; + void ignoreLineInFile(FILE *fin, char *buffer) const; + void copyLinesFromFileToFile(FILE *fin, FILE *fout, unsigned int lines, + Hashes *hash, char *buffer) const; -class RredMethod : public pkgAcqMethod -{ - bool Debug; - // the size of this doesn't really matter (except for performance) - const static int BUF_SIZE = 1024; - // the ed commands - enum Mode {MODE_CHANGED, MODE_DELETED, MODE_ADDED}; - // return values - enum State {ED_OK, ED_ORDERING, ED_PARSER, ED_FAILURE}; - // this applies a single hunk, it uses a tail recursion to - // reverse the hunks in the file - int ed_rec(FILE *ed_cmds, FILE *in_file, FILE *out_file, int line, - char *buffer, unsigned int bufsize, Hashes *hash); - // apply a patch file - int ed_file(FILE *ed_cmds, FILE *in_file, FILE *out_file, Hashes *hash); - // the methods main method - virtual bool Fetch(FetchItem *Itm); - - public: - - RredMethod() : pkgAcqMethod("1.1",SingleInstance | SendConfig) {}; + State patchFile(FileFd &Patch, FileFd &From, FileFd &out_file, Hashes *hash) const; + State patchMMap(FileFd &Patch, FileFd &From, FileFd &out_file, Hashes *hash) const; + +protected: + // the methods main method + virtual bool Fetch(FetchItem *Itm); + +public: + RredMethod() : pkgAcqMethod("1.1",SingleInstance | SendConfig) {}; }; + /*}}}*/ +/** \brief applyFile - in reverse order with a tail recursion {{{ + * + * As it is expected that the commands are in reversed order in the patch file + * we check in the first half if the command is valid, but doesn't execute it + * and move a step deeper. After reaching the end of the file we apply the + * patches in the correct order: last found command first. + * + * \param ed_cmds patch file to apply + * \param in_file base file we want to patch + * \param out_file file to write the patched result to + * \param line of command operation + * \param buffer internal used read/write buffer + * \param hash the created file for correctness + * \return the success State of the ed command executor + */ +RredMethod::State RredMethod::applyFile(FILE *ed_cmds, FILE *in_file, FILE *out_file, + unsigned long &line, char *buffer, Hashes *hash) const { + // get the current command and parse it + if (fgets(buffer, BUF_SIZE, ed_cmds) == NULL) { + if (Debug == true) + std::clog << "rred: encounter end of file - we can start patching now." 
<< std::endl; + line = 0; + return ED_OK; + } -int RredMethod::ed_rec(FILE *ed_cmds, FILE *in_file, FILE *out_file, int line, - char *buffer, unsigned int bufsize, Hashes *hash) { - int pos; - int startline; - int stopline; - int mode; - int written; - char *idx; - - /* get the current command and parse it*/ - if (fgets(buffer, bufsize, ed_cmds) == NULL) { - return line; - } - startline = strtol(buffer, &idx, 10); - if (startline < line) { - return ED_ORDERING; - } - if (*idx == ',') { - idx++; - stopline = strtol(idx, &idx, 10); - } - else { - stopline = startline; - } - if (*idx == 'c') { - mode = MODE_CHANGED; - if (Debug == true) { - std::clog << "changing from line " << startline - << " to " << stopline << std::endl; - } - } - else if (*idx == 'a') { - mode = MODE_ADDED; - if (Debug == true) { - std::clog << "adding after line " << startline << std::endl; - } - } - else if (*idx == 'd') { - mode = MODE_DELETED; - if (Debug == true) { - std::clog << "deleting from line " << startline - << " to " << stopline << std::endl; - } - } - else { - return ED_PARSER; - } - /* get the current position */ - pos = ftell(ed_cmds); - /* if this is add or change then go to the next full stop */ - if ((mode == MODE_CHANGED) || (mode == MODE_ADDED)) { - do { - fgets(buffer, bufsize, ed_cmds); - while ((strlen(buffer) == (bufsize - 1)) - && (buffer[bufsize - 2] != '\n')) { - fgets(buffer, bufsize, ed_cmds); - buffer[0] = ' '; - } - } while (strncmp(buffer, ".", 1) != 0); - } - /* do the recursive call */ - line = ed_rec(ed_cmds, in_file, out_file, line, buffer, bufsize, - hash); - /* pass on errors */ - if (line < 0) { - return line; - } - /* apply our hunk */ - fseek(ed_cmds, pos, SEEK_SET); - /* first wind to the current position */ - if (mode != MODE_ADDED) { - startline -= 1; - } - while (line < startline) { - fgets(buffer, bufsize, in_file); - written = fwrite(buffer, 1, strlen(buffer), out_file); - hash->Add((unsigned char*)buffer, written); - while ((strlen(buffer) == (bufsize - 1)) - && (buffer[bufsize - 2] != '\n')) { - fgets(buffer, bufsize, in_file); - written = fwrite(buffer, 1, strlen(buffer), out_file); - hash->Add((unsigned char*)buffer, written); - } - line++; - } - /* include from ed script */ - if ((mode == MODE_ADDED) || (mode == MODE_CHANGED)) { - do { - fgets(buffer, bufsize, ed_cmds); - if (strncmp(buffer, ".", 1) != 0) { - written = fwrite(buffer, 1, strlen(buffer), out_file); - hash->Add((unsigned char*)buffer, written); - while ((strlen(buffer) == (bufsize - 1)) - && (buffer[bufsize - 2] != '\n')) { - fgets(buffer, bufsize, ed_cmds); - written = fwrite(buffer, 1, strlen(buffer), out_file); - hash->Add((unsigned char*)buffer, written); - } - } - else { - break; - } - } while (1); - } - /* ignore the corresponding number of lines from input */ - if ((mode == MODE_DELETED) || (mode == MODE_CHANGED)) { - while (line < stopline) { - fgets(buffer, bufsize, in_file); - while ((strlen(buffer) == (bufsize - 1)) - && (buffer[bufsize - 2] != '\n')) { - fgets(buffer, bufsize, in_file); - } - line++; - } - } - return line; -} + // parse in the effected linenumbers + char* idx; + errno=0; + unsigned long const startline = strtol(buffer, &idx, 10); + if (errno == ERANGE || errno == EINVAL) { + _error->Errno("rred", "startline is an invalid number"); + return ED_PARSER; + } + if (startline > line) { + _error->Error("rred: The start line (%lu) of the next command is higher than the last line (%lu). 
This is not allowed.", startline, line); + return ED_ORDERING; + } + unsigned long stopline; + if (*idx == ',') { + idx++; + errno=0; + stopline = strtol(idx, &idx, 10); + if (errno == ERANGE || errno == EINVAL) { + _error->Errno("rred", "stopline is an invalid number"); + return ED_PARSER; + } + } + else { + stopline = startline; + } + line = startline; + + // which command to execute on this line(s)? + switch (*idx) { + case MODE_CHANGED: + if (Debug == true) + std::clog << "Change from line " << startline << " to " << stopline << std::endl; + break; + case MODE_ADDED: + if (Debug == true) + std::clog << "Insert after line " << startline << std::endl; + break; + case MODE_DELETED: + if (Debug == true) + std::clog << "Delete from line " << startline << " to " << stopline << std::endl; + break; + default: + _error->Error("rred: Unknown ed command '%c'. Abort.", *idx); + return ED_PARSER; + } + unsigned char mode = *idx; + + // save the current position + unsigned const long pos = ftell(ed_cmds); + + // if this is add or change then go to the next full stop + unsigned int data_length = 0; + if (mode == MODE_CHANGED || mode == MODE_ADDED) { + do { + ignoreLineInFile(ed_cmds, buffer); + data_length++; + } + while (strncmp(buffer, ".", 1) != 0); + data_length--; // the dot should not be copied + } + + // do the recursive call - the last command is the one we need to execute at first + const State child = applyFile(ed_cmds, in_file, out_file, line, buffer, hash); + if (child != ED_OK) { + return child; + } + + // change and delete are working on "line" - add is done after "line" + if (mode != MODE_ADDED) + line++; + + // first wind to the current position and copy over all unchanged lines + if (line < startline) { + copyLinesFromFileToFile(in_file, out_file, (startline - line), hash, buffer); + line = startline; + } -int RredMethod::ed_file(FILE *ed_cmds, FILE *in_file, FILE *out_file, - Hashes *hash) { + if (mode != MODE_ADDED) + line--; + + // include data from ed script + if (mode == MODE_CHANGED || mode == MODE_ADDED) { + fseek(ed_cmds, pos, SEEK_SET); + copyLinesFromFileToFile(ed_cmds, out_file, data_length, hash, buffer); + } + + // ignore the corresponding number of lines from input + if (mode == MODE_CHANGED || mode == MODE_DELETED) { + while (line < stopline) { + ignoreLineInFile(in_file, buffer); + line++; + } + } + return ED_OK; +} + /*}}}*/ +void RredMethod::copyLinesFromFileToFile(FILE *fin, FILE *fout, unsigned int lines,/*{{{*/ + Hashes *hash, char *buffer) const { + while (0 < lines--) { + do { + fgets(buffer, BUF_SIZE, fin); + size_t const written = fwrite(buffer, 1, strlen(buffer), fout); + hash->Add((unsigned char*)buffer, written); + } while (strlen(buffer) == (BUF_SIZE - 1) && + buffer[BUF_SIZE - 2] != '\n'); + } +} + /*}}}*/ +void RredMethod::ignoreLineInFile(FILE *fin, char *buffer) const { /*{{{*/ + fgets(buffer, BUF_SIZE, fin); + while (strlen(buffer) == (BUF_SIZE - 1) && + buffer[BUF_SIZE - 2] != '\n') { + fgets(buffer, BUF_SIZE, fin); + buffer[0] = ' '; + } +} + /*}}}*/ +RredMethod::State RredMethod::patchFile(FileFd &Patch, FileFd &From, /*{{{*/ + FileFd &out_file, Hashes *hash) const { char buffer[BUF_SIZE]; - int result; - int written; - + FILE* fFrom = fdopen(From.Fd(), "r"); + FILE* fPatch = fdopen(Patch.Fd(), "r"); + FILE* fTo = fdopen(out_file.Fd(), "w"); + /* we do a tail recursion to read the commands in the right order */ - result = ed_rec(ed_cmds, in_file, out_file, 0, buffer, BUF_SIZE, - hash); + unsigned long line = -1; // assign highest possible value + 
State const result = applyFile(fPatch, fFrom, fTo, line, buffer, hash); /* read the rest from infile */ - if (result >= 0) { - while (fgets(buffer, BUF_SIZE, in_file) != NULL) { - written = fwrite(buffer, 1, strlen(buffer), out_file); + if (result == ED_OK) { + while (fgets(buffer, BUF_SIZE, fFrom) != NULL) { + size_t const written = fwrite(buffer, 1, strlen(buffer), fTo); hash->Add((unsigned char*)buffer, written); } + fflush(fTo); } - else { - return ED_FAILURE; - } - return ED_OK; + return result; } + /*}}}*/ +struct EdCommand { /*{{{*/ + size_t data_start; + size_t data_end; + size_t data_lines; + size_t first_line; + size_t last_line; + char type; +}; +#define IOV_COUNT 1024 /* Don't really want IOV_MAX since it can be arbitrarily large */ + /*}}}*/ +RredMethod::State RredMethod::patchMMap(FileFd &Patch, FileFd &From, /*{{{*/ + FileFd &out_file, Hashes *hash) const { +#ifdef _POSIX_MAPPED_FILES + MMap ed_cmds(Patch, MMap::ReadOnly); + MMap in_file(From, MMap::ReadOnly); + + if (ed_cmds.Size() == 0 || in_file.Size() == 0) + return MMAP_FAILED; + + EdCommand* commands = 0; + size_t command_count = 0; + size_t command_alloc = 0; + + const char* begin = (char*) ed_cmds.Data(); + const char* end = begin; + const char* ed_end = (char*) ed_cmds.Data() + ed_cmds.Size(); + + const char* input = (char*) in_file.Data(); + const char* input_end = (char*) in_file.Data() + in_file.Size(); + + size_t i; + + /* 1. Parse entire script. It is executed in reverse order, so we cather it + * in the `commands' buffer first + */ + + for(;;) { + EdCommand cmd; + cmd.data_start = 0; + cmd.data_end = 0; + + while(begin != ed_end && *begin == '\n') + ++begin; + while(end != ed_end && *end != '\n') + ++end; + if(end == ed_end && begin == end) + break; + + /* Determine command range */ + const char* tmp = begin; + + for(;;) { + /* atoll is safe despite lacking NUL-termination; we know there's an + * alphabetic character at end[-1] + */ + if(tmp == end) { + cmd.first_line = atol(begin); + cmd.last_line = cmd.first_line; + break; + } + if(*tmp == ',') { + cmd.first_line = atol(begin); + cmd.last_line = atol(tmp + 1); + break; + } + ++tmp; + } + + // which command to execute on this line(s)? + switch (end[-1]) { + case MODE_CHANGED: + if (Debug == true) + std::clog << "Change from line " << cmd.first_line << " to " << cmd.last_line << std::endl; + break; + case MODE_ADDED: + if (Debug == true) + std::clog << "Insert after line " << cmd.first_line << std::endl; + break; + case MODE_DELETED: + if (Debug == true) + std::clog << "Delete from line " << cmd.first_line << " to " << cmd.last_line << std::endl; + break; + default: + _error->Error("rred: Unknown ed command '%c'. Abort.", end[-1]); + free(commands); + return ED_PARSER; + } + cmd.type = end[-1]; + + /* Determine the size of the inserted text, so we don't have to scan this + * text again later. + */ + begin = end + 1; + end = begin; + cmd.data_lines = 0; + + if(cmd.type == MODE_ADDED || cmd.type == MODE_CHANGED) { + cmd.data_start = begin - (char*) ed_cmds.Data(); + while(end != ed_end) { + if(*end == '\n') { + if(end[-1] == '.' 
&& end[-2] == '\n') + break; + ++cmd.data_lines; + } + ++end; + } + cmd.data_end = end - (char*) ed_cmds.Data() - 1; + begin = end + 1; + end = begin; + } + if(command_count == command_alloc) { + command_alloc = (command_alloc + 64) * 3 / 2; + commands = (EdCommand*) realloc(commands, command_alloc * sizeof(EdCommand)); + } + commands[command_count++] = cmd; + } + + struct iovec* iov = new struct iovec[IOV_COUNT]; + size_t iov_size = 0; + + size_t amount, remaining; + size_t line = 1; + EdCommand* cmd; + + /* 2. Execute script. We gather writes in a `struct iov' array, and flush + * using writev to minimize the number of system calls. Data is read + * directly from the memory mappings of the input file and the script. + */ + + for(i = command_count; i-- > 0; ) { + cmd = &commands[i]; + if(cmd->type == MODE_ADDED) + amount = cmd->first_line + 1; + else + amount = cmd->first_line; + + if(line < amount) { + begin = input; + while(line != amount) { + input = (const char*) memchr(input, '\n', input_end - input); + if(!input) + break; + ++line; + ++input; + } + iov[iov_size].iov_base = (void*) begin; + iov[iov_size].iov_len = input - begin; + hash->Add((const unsigned char*) begin, input - begin); -bool RredMethod::Fetch(FetchItem *Itm) + if(++iov_size == IOV_COUNT) { + writev(out_file.Fd(), iov, IOV_COUNT); + iov_size = 0; + } + } + + if(cmd->type == MODE_DELETED || cmd->type == MODE_CHANGED) { + remaining = (cmd->last_line - cmd->first_line) + 1; + line += remaining; + while(remaining) { + input = (const char*) memchr(input, '\n', input_end - input); + if(!input) + break; + --remaining; + ++input; + } + } + + if(cmd->type == MODE_CHANGED || cmd->type == MODE_ADDED) { + if(cmd->data_end != cmd->data_start) { + iov[iov_size].iov_base = (void*) ((char*)ed_cmds.Data() + cmd->data_start); + iov[iov_size].iov_len = cmd->data_end - cmd->data_start; + hash->Add((const unsigned char*) ((char*)ed_cmds.Data() + cmd->data_start), + iov[iov_size].iov_len); + + if(++iov_size == IOV_COUNT) { + writev(out_file.Fd(), iov, IOV_COUNT); + iov_size = 0; + } + } + } + } + + if(input != input_end) { + iov[iov_size].iov_base = (void*) input; + iov[iov_size].iov_len = input_end - input; + hash->Add((const unsigned char*) input, input_end - input); + ++iov_size; + } + + if(iov_size) { + writev(out_file.Fd(), iov, iov_size); + iov_size = 0; + } + + for(i = 0; i < iov_size; i += IOV_COUNT) { + if(iov_size - i < IOV_COUNT) + writev(out_file.Fd(), iov + i, iov_size - i); + else + writev(out_file.Fd(), iov + i, IOV_COUNT); + } + + delete [] iov; + free(commands); + + return ED_OK; +#else + return MMAP_FAILED; +#endif +} + /*}}}*/ +bool RredMethod::Fetch(FetchItem *Itm) /*{{{*/ { - Debug = _config->FindB("Debug::pkgAcquire::RRed",false); + Debug = _config->FindB("Debug::pkgAcquire::RRed", false); URI Get = Itm->Uri; string Path = Get.Host + Get.Path; // To account for relative paths - // Path contains the filename to patch + FetchResult Res; Res.Filename = Itm->DestFile; - URIStart(Res); - // Res.Filename the destination filename + if (Itm->Uri.empty() == true) { + Path = Itm->DestFile; + Itm->DestFile.append(".result"); + } else + URIStart(Res); if (Debug == true) std::clog << "Patching " << Path << " with " << Path @@ -211,19 +452,27 @@ bool RredMethod::Fetch(FetchItem *Itm) return false; Hashes Hash; - FILE* fFrom = fdopen(From.Fd(), "r"); - FILE* fPatch = fdopen(Patch.Fd(), "r"); - FILE* fTo = fdopen(To.Fd(), "w"); // now do the actual patching - if (ed_file(fPatch, fFrom, fTo, &Hash) != ED_OK) { - _error->Errno("rred", 
_("Could not patch file")); - return false; + State const result = patchMMap(Patch, From, To, &Hash); + if (result == MMAP_FAILED) { + // retry with patchFile + lseek(Patch.Fd(), 0, SEEK_SET); + lseek(From.Fd(), 0, SEEK_SET); + To.Open(Itm->DestFile,FileFd::WriteEmpty); + if (_error->PendingError() == true) + return false; + if (patchFile(Patch, From, To, &Hash) != ED_OK) { + return _error->WarningE("rred", _("Could not patch %s with mmap and with file operation usage - the patch seems to be corrupt."), Path.c_str()); + } else if (Debug == true) { + std::clog << "rred: finished file patching of " << Path << " after mmap failed." << std::endl; + } + } else if (result != ED_OK) { + return _error->Errno("rred", _("Could not patch %s with mmap (but no mmap specific fail) - the patch seems to be corrupt."), Path.c_str()); + } else if (Debug == true) { + std::clog << "rred: finished mmap patching of " << Path << std::endl; } // write out the result - fflush(fFrom); - fflush(fPatch); - fflush(fTo); From.Close(); Patch.Close(); To.Close(); @@ -250,13 +499,44 @@ bool RredMethod::Fetch(FetchItem *Itm) return true; } - -int main(int argc, char *argv[]) -{ - RredMethod Mth; - - Prog = strrchr(argv[0],'/'); - Prog++; - - return Mth.Run(); + /*}}}*/ +/** \brief Wrapper class for testing rred */ /*{{{*/ +class TestRredMethod : public RredMethod { +public: + /** \brief Run rred in debug test mode + * + * This method can be used to run the rred method outside + * of the "normal" acquire environment for easier testing. + * + * \param base basename of all files involved in this rred test + */ + bool Run(char const *base) { + _config->CndSet("Debug::pkgAcquire::RRed", "true"); + FetchItem *test = new FetchItem; + test->DestFile = base; + return Fetch(test); + } +}; + /*}}}*/ +/** \brief Starter for the rred method (or its test method) {{{ + * + * Used without parameters is the normal behavior for methods for + * the APT acquire system. While this works great for the acquire system + * it is very hard to test the method and therefore the method also + * accepts one parameter which will switch it directly to debug test mode: + * The test mode expects that if "Testfile" is given as parameter + * the file "Testfile" should be ed-style patched with "Testfile.ed" + * and will write the result to "Testfile.result". + */ +int main(int argc, char *argv[]) { + if (argc <= 1) { + RredMethod Mth; + return Mth.Run(); + } else { + TestRredMethod Mth; + bool result = Mth.Run(argv[1]); + _error->DumpErrors(); + return result; + } } + /*}}}*/ diff --git a/po/apt-all.pot b/po/apt-all.pot index ac231dff9..4ccf7413d 100644 --- a/po/apt-all.pot +++ b/po/apt-all.pot @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2009-12-10 22:06+0100\n" +"POT-Creation-Date: 2009-12-10 23:01+0100\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n" "Language-Team: LANGUAGE <LL@li.org>\n" @@ -342,7 +342,7 @@ msgstr "" #: ftparchive/cachedb.cc:72 msgid "" -"DB format is invalid. If you upgraded from a older version of apt, please " +"DB format is invalid. If you upgraded from an older version of apt, please " "remove and re-create the database." 
msgstr "" @@ -357,11 +357,11 @@ msgstr "" msgid "Failed to stat %s" msgstr "" -#: ftparchive/cachedb.cc:238 +#: ftparchive/cachedb.cc:242 msgid "Archive has no control record" msgstr "" -#: ftparchive/cachedb.cc:444 +#: ftparchive/cachedb.cc:448 msgid "Unable to get a cursor" msgstr "" @@ -426,26 +426,26 @@ msgstr "" msgid " DeLink limit of %sB hit.\n" msgstr "" -#: ftparchive/writer.cc:388 +#: ftparchive/writer.cc:389 msgid "Archive had no package field" msgstr "" -#: ftparchive/writer.cc:396 ftparchive/writer.cc:627 +#: ftparchive/writer.cc:397 ftparchive/writer.cc:628 #, c-format msgid " %s has no override entry\n" msgstr "" -#: ftparchive/writer.cc:457 ftparchive/writer.cc:715 +#: ftparchive/writer.cc:458 ftparchive/writer.cc:716 #, c-format msgid " %s maintainer is %s not %s\n" msgstr "" -#: ftparchive/writer.cc:637 +#: ftparchive/writer.cc:638 #, c-format msgid " %s has no source override entry\n" msgstr "" -#: ftparchive/writer.cc:641 +#: ftparchive/writer.cc:642 #, c-format msgid " %s has no binary override entry either\n" msgstr "" @@ -1570,11 +1570,11 @@ msgid "File not found" msgstr "" #: methods/copy.cc:43 methods/gzip.cc:141 methods/gzip.cc:150 -#: methods/rred.cc:234 methods/rred.cc:243 +#: methods/rred.cc:483 methods/rred.cc:492 msgid "Failed to stat" msgstr "" -#: methods/copy.cc:80 methods/gzip.cc:147 methods/rred.cc:240 +#: methods/copy.cc:80 methods/gzip.cc:147 methods/rred.cc:489 msgid "Failed to set modification time" msgstr "" @@ -1700,7 +1700,7 @@ msgstr "" msgid "Unable to accept connection" msgstr "" -#: methods/ftp.cc:870 methods/http.cc:999 methods/rsh.cc:303 +#: methods/ftp.cc:870 methods/http.cc:1000 methods/rsh.cc:303 msgid "Problem hashing file" msgstr "" @@ -1764,22 +1764,22 @@ msgstr "" msgid "Connecting to %s" msgstr "" -#: methods/connect.cc:165 methods/connect.cc:184 +#: methods/connect.cc:166 methods/connect.cc:185 #, c-format msgid "Could not resolve '%s'" msgstr "" -#: methods/connect.cc:190 +#: methods/connect.cc:191 #, c-format msgid "Temporary failure resolving '%s'" msgstr "" -#: methods/connect.cc:193 +#: methods/connect.cc:194 #, c-format msgid "Something wicked happened resolving '%s:%s' (%i - %s)" msgstr "" -#: methods/connect.cc:240 +#: methods/connect.cc:241 #, c-format msgid "Unable to connect to %s:%s:" msgstr "" @@ -1864,47 +1864,47 @@ msgstr "" msgid "Unknown date format" msgstr "" -#: methods/http.cc:790 +#: methods/http.cc:791 msgid "Select failed" msgstr "" -#: methods/http.cc:795 +#: methods/http.cc:796 msgid "Connection timed out" msgstr "" -#: methods/http.cc:818 +#: methods/http.cc:819 msgid "Error writing to output file" msgstr "" -#: methods/http.cc:849 +#: methods/http.cc:850 msgid "Error writing to file" msgstr "" -#: methods/http.cc:877 +#: methods/http.cc:878 msgid "Error writing to the file" msgstr "" -#: methods/http.cc:891 +#: methods/http.cc:892 msgid "Error reading from server. 
Remote end closed connection" msgstr "" -#: methods/http.cc:893 +#: methods/http.cc:894 msgid "Error reading from server" msgstr "" -#: methods/http.cc:984 apt-pkg/contrib/mmap.cc:215 +#: methods/http.cc:985 apt-pkg/contrib/mmap.cc:233 msgid "Failed to truncate file" msgstr "" -#: methods/http.cc:1149 +#: methods/http.cc:1150 msgid "Bad header data" msgstr "" -#: methods/http.cc:1166 methods/http.cc:1221 +#: methods/http.cc:1167 methods/http.cc:1222 msgid "Connection failed" msgstr "" -#: methods/http.cc:1313 +#: methods/http.cc:1314 msgid "Internal error" msgstr "" @@ -1912,18 +1912,25 @@ msgstr "" msgid "Can't mmap an empty file" msgstr "" -#: apt-pkg/contrib/mmap.cc:81 apt-pkg/contrib/mmap.cc:187 +#: apt-pkg/contrib/mmap.cc:81 apt-pkg/contrib/mmap.cc:202 #, c-format msgid "Couldn't make mmap of %lu bytes" msgstr "" -#: apt-pkg/contrib/mmap.cc:234 +#: apt-pkg/contrib/mmap.cc:252 #, c-format msgid "" "Dynamic MMap ran out of room. Please increase the size of APT::Cache-Limit. " "Current value: %lu. (man 5 apt.conf)" msgstr "" +#: apt-pkg/contrib/mmap.cc:347 +#, c-format +msgid "" +"The size of a MMap has already reached the defined limit of %lu bytes,abort " +"the try to grow the MMap." +msgstr "" + #. d means days, h means hours, min means minutes, s means seconds #: apt-pkg/contrib/strutl.cc:346 #, c-format @@ -2533,39 +2540,39 @@ msgstr "" msgid "rename failed, %s (%s -> %s)." msgstr "" -#: apt-pkg/acquire-item.cc:395 +#: apt-pkg/acquire-item.cc:396 msgid "MD5Sum mismatch" msgstr "" -#: apt-pkg/acquire-item.cc:649 apt-pkg/acquire-item.cc:1411 +#: apt-pkg/acquire-item.cc:657 apt-pkg/acquire-item.cc:1419 msgid "Hash Sum mismatch" msgstr "" -#: apt-pkg/acquire-item.cc:1106 +#: apt-pkg/acquire-item.cc:1114 msgid "There is no public key available for the following key IDs:\n" msgstr "" -#: apt-pkg/acquire-item.cc:1216 +#: apt-pkg/acquire-item.cc:1224 #, c-format msgid "" "I wasn't able to locate a file for the %s package. This might mean you need " "to manually fix this package. (due to missing arch)" msgstr "" -#: apt-pkg/acquire-item.cc:1275 +#: apt-pkg/acquire-item.cc:1283 #, c-format msgid "" "I wasn't able to locate file for the %s package. This might mean you need to " "manually fix this package." msgstr "" -#: apt-pkg/acquire-item.cc:1316 +#: apt-pkg/acquire-item.cc:1324 #, c-format msgid "" "The package index files are corrupted. No Filename: field for package %s." msgstr "" -#: apt-pkg/acquire-item.cc:1403 +#: apt-pkg/acquire-item.cc:1411 msgid "Size mismatch" msgstr "" @@ -2807,8 +2814,18 @@ msgstr "" msgid "Not locked" msgstr "" -#: methods/rred.cc:219 -msgid "Could not patch file" +#: methods/rred.cc:465 +#, c-format +msgid "" +"Could not patch %s with mmap and with file operation usage - the patch seems " +"to be corrupt." +msgstr "" + +#: methods/rred.cc:470 +#, c-format +msgid "" +"Could not patch %s with mmap (but no mmap specific fail) - the patch seems " +"to be corrupt." 
msgstr "" #: methods/rsh.cc:330 diff --git a/test/libapt/assert.h b/test/libapt/assert.h new file mode 100644 index 000000000..5da76ae0a --- /dev/null +++ b/test/libapt/assert.h @@ -0,0 +1,21 @@ +#include <iostream> + +#define equals(x,y) assertEquals(x, y, __LINE__) + +template < typename X, typename Y > +void OutputAssert(X expect, char const* compare, Y get, unsigned long const &line) { + std::cerr << "Test FAILED: »" << expect << "« " << compare << " »" << get << "« at line " << line << std::endl; +} + +template < typename X, typename Y > +void assertEquals(X expect, Y get, unsigned long const &line) { + if (expect == get) + return; + OutputAssert(expect, "==", get, line); +} + +void assertEquals(unsigned int const &expect, int const &get, unsigned long const &line) { + if (get < 0) + OutputAssert(expect, "==", get, line); + assertEquals<unsigned int const&, unsigned int const&>(expect, get, line); +} diff --git a/test/libapt/getlanguages_test.cc b/test/libapt/getlanguages_test.cc new file mode 100644 index 000000000..fd3c8269f --- /dev/null +++ b/test/libapt/getlanguages_test.cc @@ -0,0 +1,91 @@ +#include <apt-pkg/aptconfiguration.h> +#include <apt-pkg/configuration.h> + +#include "assert.h" +#include <string> +#include <vector> + +#include <iostream> + +// simple helper to quickly output a vector of strings +void dumpVector(std::vector<std::string> vec) { + for (std::vector<std::string>::const_iterator v = vec.begin(); + v != vec.end(); v++) + std::cout << *v << std::endl; +} + +int main(int argc,char *argv[]) +{ + std::vector<std::string> vec = APT::Configuration::getLanguages(false, false, "de_DE.UTF-8"); + equals(vec.size(), 2); + equals(vec[0], "de"); + equals(vec[1], "en"); + + // Special: Check if the cache is actually in use + vec = APT::Configuration::getLanguages(false, true, "en_GB.UTF-8"); + equals(vec.size(), 2); + equals(vec[0], "de"); + equals(vec[1], "en"); + + vec = APT::Configuration::getLanguages(false, false, "en_GB.UTF-8"); + equals(vec.size(), 2); + equals(vec[0], "en_GB"); + equals(vec[1], "en"); + + vec = APT::Configuration::getLanguages(false, false, "pt_PR.UTF-8"); + equals(vec.size(), 3); + equals(vec[0], "pt_PR"); + equals(vec[1], "pt"); + equals(vec[2], "en"); + + vec = APT::Configuration::getLanguages(false, false, "ast_DE.UTF-8"); // bogus, but syntactical correct + equals(vec.size(), 2); + equals(vec[0], "ast"); + equals(vec[1], "en"); + + vec = APT::Configuration::getLanguages(false, false, "C"); + equals(vec.size(), 1); + equals(vec[0], "en"); + + _config->Set("Acquire::Languages::1", "environment"); + _config->Set("Acquire::Languages::2", "en"); + vec = APT::Configuration::getLanguages(false, false, "de_DE.UTF-8"); + equals(vec.size(), 2); + equals(vec[0], "de"); + equals(vec[1], "en"); + + _config->Set("Acquire::Languages::3", "de"); + vec = APT::Configuration::getLanguages(false, false, "de_DE.UTF-8"); + equals(vec.size(), 2); + equals(vec[0], "de"); + equals(vec[1], "en"); + + _config->Set("Acquire::Languages::1", "none"); + vec = APT::Configuration::getLanguages(false, false, "de_DE.UTF-8"); + equals(vec.size(), 0); + vec = APT::Configuration::getLanguages(true, false, "de_DE.UTF-8"); + equals(vec[0], "en"); + equals(vec[1], "de"); + + _config->Set("Acquire::Languages::1", "fr"); + _config->Set("Acquire::Languages", "de_DE"); + vec = APT::Configuration::getLanguages(false, false, "de_DE.UTF-8"); + equals(vec.size(), 1); + equals(vec[0], "de_DE"); + + _config->Set("Acquire::Languages", "none"); + vec = APT::Configuration::getLanguages(true, false, 
"de_DE.UTF-8"); + equals(vec.size(), 0); + + _config->Set("Acquire::Languages", ""); + //FIXME: Remove support for this deprecated setting + _config->Set("APT::Acquire::Translation", "ast_DE"); + vec = APT::Configuration::getLanguages(true, false, "de_DE.UTF-8"); + equals(vec.size(), 1); + equals(vec[0], "ast_DE"); + _config->Set("APT::Acquire::Translation", "none"); + vec = APT::Configuration::getLanguages(true, false, "de_DE.UTF-8"); + equals(vec.size(), 0); + + return 0; +} diff --git a/test/libapt/makefile b/test/libapt/makefile new file mode 100644 index 000000000..5712c025a --- /dev/null +++ b/test/libapt/makefile @@ -0,0 +1,19 @@ +# -*- make -*- +BASE=../.. +SUBDIR=test/libapt +BASENAME=_libapt_test + +# Bring in the default rules +include ../../buildlib/defaults.mak + +# Program for testing getLanguageCode +PROGRAM = getLanguages${BASENAME} +SLIBS = -lapt-pkg +SOURCE = getlanguages_test.cc +include $(PROGRAM_H) + +# Program for testing ParseDepends +PROGRAM = ParseDepends${BASENAME} +SLIBS = -lapt-pkg +SOURCE = parsedepends_test.cc +include $(PROGRAM_H) diff --git a/test/libapt/parsedepends_test.cc b/test/libapt/parsedepends_test.cc new file mode 100644 index 000000000..b7befa561 --- /dev/null +++ b/test/libapt/parsedepends_test.cc @@ -0,0 +1,128 @@ +#include <apt-pkg/deblistparser.h> +#include <apt-pkg/configuration.h> + +#include "assert.h" + +int main(int argc,char *argv[]) { + string Package; + string Version; + unsigned int Op = 5; + unsigned int Null = 0; + bool StripMultiArch = true; + bool ParseArchFlags = false; + _config->Set("APT::Architecture","dsk"); + + const char* Depends = + "debhelper:any (>= 5.0), " + "libdb-dev:any, " + "gettext:native (<= 0.12), " + "libcurl4-gnutls-dev:native | libcurl3-gnutls-dev (>> 7.15.5), " + "debiandoc-sgml, " + "apt (>= 0.7.25), " + "not-for-me [ !dsk ], " + "only-for-me [ dsk ], " + "overlord-dev:any (= 7.15.3~) | overlord-dev:native (>> 7.15.5), " + ; + + unsigned short runner = 0; +test: +// std::clog << (StripMultiArch ? "NO-Multi" : "Multi") << " " << (ParseArchFlags ? "Flags" : "NO-Flags") << std::endl; + + // Stripping MultiArch is currently the default setting to not confuse + // non-MultiArch capable users of the library with "strange" extensions. 
+   const char* Start = Depends;
+   const char* End = Depends + strlen(Depends);
+
+   Start = debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch);
+   if (StripMultiArch == true)
+      equals("debhelper", Package);
+   else
+      equals("debhelper:any", Package);
+   equals("5.0", Version);
+   equals(Null | pkgCache::Dep::GreaterEq, Op);
+
+   Start = debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch);
+   if (StripMultiArch == true)
+      equals("libdb-dev", Package);
+   else
+      equals("libdb-dev:any", Package);
+   equals("", Version);
+   equals(Null | pkgCache::Dep::NoOp, Op);
+
+   Start = debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch);
+   if (StripMultiArch == true)
+      equals("gettext", Package);
+   else
+      equals("gettext:native", Package);
+   equals("0.12", Version);
+   equals(Null | pkgCache::Dep::LessEq, Op);
+
+   Start = debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch);
+   if (StripMultiArch == true)
+      equals("libcurl4-gnutls-dev", Package);
+   else
+      equals("libcurl4-gnutls-dev:native", Package);
+   equals("", Version);
+   equals(Null | pkgCache::Dep::Or, Op);
+
+   Start = debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch);
+   equals("libcurl3-gnutls-dev", Package);
+   equals("7.15.5", Version);
+   equals(Null | pkgCache::Dep::Greater, Op);
+
+   Start = debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch);
+   equals("debiandoc-sgml", Package);
+   equals("", Version);
+   equals(Null | pkgCache::Dep::NoOp, Op);
+
+   Start = debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch);
+   equals("apt", Package);
+   equals("0.7.25", Version);
+   equals(Null | pkgCache::Dep::GreaterEq, Op);
+
+   if (ParseArchFlags == true) {
+      Start = debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch);
+      equals("", Package); // not-for-me
+   } else {
+      equals(true, 0 == debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch));
+      Start = strstr(Start, ",");
+      Start++;
+   }
+
+   if (ParseArchFlags == true) {
+      Start = debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch);
+      equals("only-for-me", Package);
+      equals("", Version);
+      equals(Null | pkgCache::Dep::NoOp, Op);
+   } else {
+      equals(true, 0 == debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch));
+      Start = strstr(Start, ",");
+      Start++;
+   }
+
+   Start = debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch);
+   if (StripMultiArch == true)
+      equals("overlord-dev", Package);
+   else
+      equals("overlord-dev:any", Package);
+   equals("7.15.3~", Version);
+   equals(Null | pkgCache::Dep::Equals | pkgCache::Dep::Or, Op);
+
+   Start = debListParser::ParseDepends(Start, End, Package, Version, Op, ParseArchFlags, StripMultiArch);
+   if (StripMultiArch == true)
+      equals("overlord-dev", Package);
+   else
+      equals("overlord-dev:native", Package);
+   equals("7.15.5", Version);
+   equals(Null | pkgCache::Dep::Greater, Op);
+
+   if (StripMultiArch == false)
+      ParseArchFlags = true;
+   StripMultiArch = !StripMultiArch;
+
+   runner++;
+   if (runner < 4)
+      goto test; // this is the proof: tests are really evil ;)
+
+   return 0;
+}