author     Michael Vogt <mvo@debian.org>    2014-01-25 22:40:04 +0100
committer  Michael Vogt <mvo@debian.org>    2014-01-25 22:40:04 +0100
commit     6fa5b7b71e5c8ec5d4bec7bec3ef311c4d9c826b (patch)
tree       a02ec2b10f30e713eedd3f7c63cc44051fb6c859 /apt-pkg
parent     72af508bdbca55d0752aab3369faa1dc944a04e7 (diff)
parent     9aef3908c892f9d9349d8bf8a5ceaeea313ba0fe (diff)
Merge branch 'debian/sid' into ubuntu/master
Conflicts:
configure.ac
Diffstat (limited to 'apt-pkg')
-rw-r--r--   apt-pkg/acquire-item.cc      176
-rw-r--r--   apt-pkg/acquire-item.h       100
-rw-r--r--   apt-pkg/contrib/fileutl.cc     2
-rw-r--r--   apt-pkg/deb/dpkgpm.cc         28
-rw-r--r--   apt-pkg/sourcelist.cc        141
-rw-r--r--   apt-pkg/sourcelist.h          10
-rw-r--r--   apt-pkg/tagfile.cc            84
7 files changed, 461 insertions(+), 80 deletions(-)
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index b76921312..1185908f3 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -369,10 +369,10 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
       return;
    }
 
-   if(Debug)
-      std::clog << "pkgAcqIndexDiffs::pkgAcqIndexDiffs(): "
-                << CurrentPackagesFile << std::endl;
-
+   if(Debug)
+      std::clog << "pkgAcqDiffIndex::pkgAcqDiffIndex(): "
+                << CurrentPackagesFile << std::endl;
+
    QueueURI(Desc);
 
 }
@@ -398,8 +398,8 @@ string pkgAcqDiffIndex::Custom600Headers()
 bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)		/*{{{*/
 {
    if(Debug)
-      std::clog << "pkgAcqIndexDiffs::ParseIndexDiff() " << IndexDiffFile
-                << std::endl;
+      std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile
+                << std::endl;
 
    pkgTagSection Tags;
    string ServerSha1;
@@ -426,16 +426,18 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)	/*{{{*/
       SHA1.AddFD(fd);
       string const local_sha1 = SHA1.Result();
 
-      if(local_sha1 == ServerSha1)
+      if(local_sha1 == ServerSha1)
       {
-         // we have the same sha1 as the server
+         // we have the same sha1 as the server so we are done here
          if(Debug)
            std::clog << "Package file is up-to-date" << std::endl;
-         // set found to true, this will queue a pkgAcqIndexDiffs with
-         // a empty availabe_patches
-         found = true;
-      }
-      else
+         // list cleanup needs to know that this file as well as the already
+         // present index is ours, so we create an empty diff to save it for us
+         new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
+                              ExpectedHash, ServerSha1, available_patches);
+         return true;
+      }
+      else
       {
         if(Debug)
           std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl;
@@ -460,7 +462,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)	/*{{{*/
        if (available_patches.empty() == false)
        {
           // patching with too many files is rather slow compared to a fast download
-          unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 20);
+          unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
          if (fileLimit != 0 && fileLimit < available_patches.size())
          {
             if (Debug)
@@ -496,14 +498,37 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile)	/*{{{*/
      }
 
      // we have something, queue the next diff
-     if(found)
+     if(found)
      {
         // queue the diffs
         string::size_type const last_space = Description.rfind(" ");
         if(last_space != string::npos)
            Description.erase(last_space, Description.size()-last_space);
-        new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc,
-                             ExpectedHash, ServerSha1, available_patches);
+
+        /* decide if we should download patches one by one or in one go:
+           The first is good if the server merges patches, but many don't so client
+           based merging can be attempt in which case the second is better.
+ "bad things" will happen if patches are merged on the server, + but client side merging is attempt as well */ + bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true); + if (pdiff_merge == true) + { + // reprepro adds this flag if it has merged patches on the server + std::string const precedence = Tags.FindS("X-Patch-Precedence"); + pdiff_merge = (precedence != "merged"); + } + + if (pdiff_merge == false) + new pkgAcqIndexDiffs(Owner, RealURI, Description, Desc.ShortDesc, + ExpectedHash, ServerSha1, available_patches); + else + { + std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size()); + for(size_t i = 0; i < available_patches.size(); ++i) + (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, RealURI, Description, Desc.ShortDesc, ExpectedHash, + available_patches[i], diffs); + } + Complete = false; Status = StatDone; Dequeue(); @@ -752,6 +777,123 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size,string Md5Has } } /*}}}*/ +// AcqIndexMergeDiffs::AcqIndexMergeDiffs - Constructor /*{{{*/ +pkgAcqIndexMergeDiffs::pkgAcqIndexMergeDiffs(pkgAcquire *Owner, + string const &URI, string const &URIDesc, + string const &ShortDesc, HashString const &ExpectedHash, + DiffInfo const &patch, + std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches) + : Item(Owner), RealURI(URI), ExpectedHash(ExpectedHash), + patch(patch),allPatches(allPatches), State(StateFetchDiff) +{ + + DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + DestFile += URItoFileName(URI); + + Debug = _config->FindB("Debug::pkgAcquire::Diffs",false); + + Description = URIDesc; + Desc.Owner = this; + Desc.ShortDesc = ShortDesc; + + Desc.URI = string(RealURI) + ".diff/" + patch.file + ".gz"; + Desc.Description = Description + " " + patch.file + string(".pdiff"); + DestFile = _config->FindDir("Dir::State::lists") + "partial/"; + DestFile += URItoFileName(RealURI + ".diff/" + patch.file); + + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs: " << Desc.URI << std::endl; + + QueueURI(Desc); +} + /*}}}*/ +void pkgAcqIndexMergeDiffs::Failed(string Message,pkgAcquire::MethodConfig *Cnf)/*{{{*/ +{ + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs failed: " << Desc.URI << " with " << Message << std::endl; + Complete = false; + Status = StatDone; + Dequeue(); + + // check if we are the first to fail, otherwise we are done here + State = StateDoneDiff; + for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) + if ((*I)->State == StateErrorDiff) + return; + + // first failure means we should fallback + State = StateErrorDiff; + std::clog << "Falling back to normal index file aquire" << std::endl; + new pkgAcqIndex(Owner, RealURI, Description,Desc.ShortDesc, + ExpectedHash); +} + /*}}}*/ +void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,string Md5Hash, /*{{{*/ + pkgAcquire::MethodConfig *Cnf) +{ + if(Debug) + std::clog << "pkgAcqIndexMergeDiffs::Done(): " << Desc.URI << std::endl; + + Item::Done(Message,Size,Md5Hash,Cnf); + + string const FinalFile = _config->FindDir("Dir::State::lists") + URItoFileName(RealURI); + + if (State == StateFetchDiff) + { + // rred expects the patch as $FinalFile.ed.$patchname.gz + Rename(DestFile, FinalFile + ".ed." 
+ patch.file + ".gz"); + + // check if this is the last completed diff + State = StateDoneDiff; + for (std::vector<pkgAcqIndexMergeDiffs *>::const_iterator I = allPatches->begin(); + I != allPatches->end(); ++I) + if ((*I)->State != StateDoneDiff) + { + if(Debug) + std::clog << "Not the last done diff in the batch: " << Desc.URI << std::endl; + return; + } + + // this is the last completed diff, so we are ready to apply now + State = StateApplyDiff; + + if(Debug) + std::clog << "Sending to rred method: " << FinalFile << std::endl; + + Local = true; + Desc.URI = "rred:" + FinalFile; + QueueURI(Desc); + Mode = "rred"; + return; + } + // success in download/apply all diffs, clean up + else if (State == StateApplyDiff) + { + // see if we really got the expected file + if(!ExpectedHash.empty() && !ExpectedHash.VerifyFile(DestFile)) + { + RenameOnError(HashSumMismatch); + return; + } + + // move the result into place + if(Debug) + std::clog << "Moving patched file in place: " << std::endl + << DestFile << " -> " << FinalFile << std::endl; + Rename(DestFile, FinalFile); + chmod(FinalFile.c_str(), 0644); + + // otherwise lists cleanup will eat the file + DestFile = FinalFile; + + // all set and done + Complete = true; + if(Debug) + std::clog << "allDone: " << DestFile << "\n" << std::endl; + } +} + /*}}}*/ // AcqIndex::AcqIndex - Constructor /*{{{*/ // --------------------------------------------------------------------- /* The package file is added to the queue and a second class is diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h index 6b4f73708..5a1c7979c 100644 --- a/apt-pkg/acquire-item.h +++ b/apt-pkg/acquire-item.h @@ -429,7 +429,105 @@ class pkgAcqDiffIndex : public pkgAcquire::Item std::string ShortDesc, HashString ExpectedHash); }; /*}}}*/ -/** \brief An item that is responsible for fetching all the patches {{{ +/** \brief An item that is responsible for fetching client-merge patches {{{ + * that need to be applied to a given package index file. + * + * Instead of downloading and applying each patch one by one like its + * sister #pkgAcqIndexDiffs this class will download all patches at once + * and call rred with all the patches downloaded once. Rred will then + * merge and apply them in one go, which should be a lot faster – but is + * incompatible with server-based merges of patches like reprepro can do. + * + * \sa pkgAcqDiffIndex, pkgAcqIndex + */ +class pkgAcqIndexMergeDiffs : public pkgAcquire::Item +{ + protected: + + /** \brief If \b true, debugging output will be written to + * std::clog. + */ + bool Debug; + + /** \brief description of the item that is currently being + * downloaded. + */ + pkgAcquire::ItemDesc Desc; + + /** \brief URI of the package index file that is being + * reconstructed. + */ + std::string RealURI; + + /** \brief HashSum of the package index file that is being + * reconstructed. + */ + HashString ExpectedHash; + + /** \brief description of the file being downloaded. */ + std::string Description; + + /** \brief information about the current patch */ + struct DiffInfo const patch; + + /** \brief list of all download items for the patches */ + std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches; + + /** The current status of this patch. */ + enum DiffState + { + /** \brief The diff is currently being fetched. */ + StateFetchDiff, + + /** \brief The diff is currently being applied. 
+      /** \brief The diff is currently being applied. */
+      StateApplyDiff,
+
+      /** \brief the work with this diff is done */
+      StateDoneDiff,
+
+      /** \brief something bad happened and fallback was triggered */
+      StateErrorDiff
+   } State;
+
+   public:
+   /** \brief Called when the patch file failed to be downloaded.
+    *
+    *  This method will fall back to downloading the whole index file
+    *  outright; its arguments are ignored.
+    */
+   virtual void Failed(std::string Message,pkgAcquire::MethodConfig *Cnf);
+
+   virtual void Done(std::string Message,unsigned long long Size,std::string Md5Hash,
+                     pkgAcquire::MethodConfig *Cnf);
+   virtual std::string DescURI() {return RealURI + "Index";};
+
+   /** \brief Create an index merge-diff item.
+    *
+    *  \param Owner The pkgAcquire object that owns this item.
+    *
+    *  \param URI The URI of the package index file being
+    *  reconstructed.
+    *
+    *  \param URIDesc A long description of this item.
+    *
+    *  \param ShortDesc A brief description of this item.
+    *
+    *  \param ExpectedHash The expected md5sum of the completely
+    *  reconstructed package index file; the index file will be tested
+    *  against this value when it is entirely reconstructed.
+    *
+    *  \param patch contains infos about the patch this item is supposed
+    *  to download which were read from the index
+    *
+    *  \param allPatches contains all related items so that each item can
+    *  check if it was the last one to complete the download step
+    */
+   pkgAcqIndexMergeDiffs(pkgAcquire *Owner,std::string const &URI,std::string const &URIDesc,
+                         std::string const &ShortDesc, HashString const &ExpectedHash,
+                         DiffInfo const &patch, std::vector<pkgAcqIndexMergeDiffs*> const * const allPatches);
+};
+									/*}}}*/
+/** \brief An item that is responsible for fetching server-merge patches {{{
  * that need to be applied to a given package index file.
  *
  * After downloading and applying a single patch, this item will
diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc
index efbf7aaf4..ffb8b4b53 100644
--- a/apt-pkg/contrib/fileutl.cc
+++ b/apt-pkg/contrib/fileutl.cc
@@ -465,7 +465,7 @@ std::vector<string> GetListOfFilesInDir(string const &Dir, std::vector<string> c
       const char *C = Ent->d_name;
       for (; *C != 0; ++C)
          if (isalpha(*C) == 0 && isdigit(*C) == 0
-             && *C != '_' && *C != '-') {
+             && *C != '_' && *C != '-' && *C != ':') {
            // no required extension -> dot is a bad character
&& Ext.empty() == false) continue; diff --git a/apt-pkg/deb/dpkgpm.cc b/apt-pkg/deb/dpkgpm.cc index e1f127d3b..a72745774 100644 --- a/apt-pkg/deb/dpkgpm.cc +++ b/apt-pkg/deb/dpkgpm.cc @@ -1027,6 +1027,12 @@ bool pkgDPkgPM::Go(int StatusFd) void pkgDPkgPM::StartPtyMagic() { + if (_config->FindB("Dpkg::Use-Pty", true) == false) + { + d->master = d->slave = -1; + return; + } + // setup the pty and stuff struct winsize win; @@ -1035,10 +1041,9 @@ void pkgDPkgPM::StartPtyMagic() _error->PushToStack(); if (tcgetattr(STDOUT_FILENO, &d->tt) == 0) { - ioctl(1, TIOCGWINSZ, (char *)&win); - if (_config->FindB("Dpkg::Use-Pty", true) == false) + if (ioctl(1, TIOCGWINSZ, (char *)&win) < 0) { - d->master = d->slave = -1; + _error->Errno("ioctl", _("ioctl(TIOCGWINSZ) failed")); } else if (openpty(&d->master, &d->slave, NULL, &d->tt, &win) < 0) { _error->Errno("openpty", _("Can not write log (%s)"), _("Is /dev/pts mounted?")); @@ -1392,12 +1397,17 @@ bool pkgDPkgPM::GoNoABIBreak(APT::Progress::PackageManager *progress) if(d->slave >= 0 && d->master >= 0) { setsid(); - ioctl(d->slave, TIOCSCTTY, 0); - close(d->master); - dup2(d->slave, 0); - dup2(d->slave, 1); - dup2(d->slave, 2); - close(d->slave); + int res = ioctl(d->slave, TIOCSCTTY, 0); + if (res < 0) { + std::cerr << "ioctl(TIOCSCTTY) failed for fd: " + << d->slave << std::endl; + } else { + close(d->master); + dup2(d->slave, 0); + dup2(d->slave, 1); + dup2(d->slave, 2); + close(d->slave); + } } close(fd[0]); // close the read end of the pipe diff --git a/apt-pkg/sourcelist.cc b/apt-pkg/sourcelist.cc index 0fd237cad..1f5179885 100644 --- a/apt-pkg/sourcelist.cc +++ b/apt-pkg/sourcelist.cc @@ -17,8 +17,10 @@ #include <apt-pkg/configuration.h> #include <apt-pkg/metaindex.h> #include <apt-pkg/indexfile.h> +#include <apt-pkg/tagfile.h> #include <fstream> +#include <algorithm> #include <apti18n.h> /*}}}*/ @@ -70,6 +72,69 @@ bool pkgSourceList::Type::FixupURI(string &URI) const return true; } /*}}}*/ +bool pkgSourceList::Type::ParseStanza(vector<metaIndex *> &List, + pkgTagSection &Tags, + int i, + FileFd &Fd) +{ + map<string, string> Options; + + string Enabled = Tags.FindS("Enabled"); + if (Enabled.size() > 0 && StringToBool(Enabled) == false) + return true; + + // Define external/internal options + const char* option_deb822[] = { + "Architectures", "Architectures-Add", "Architectures-Remove", "Trusted", + }; + const char* option_internal[] = { + "arch", "arch+", "arch-", "trusted", + }; + for (unsigned int j=0; j < sizeof(option_deb822)/sizeof(char*); j++) + if (Tags.Exists(option_deb822[j])) + { + // for deb822 the " " is the delimiter, but the backend expects "," + std::string option = Tags.FindS(option_deb822[j]); + std::replace(option.begin(), option.end(), ' ', ','); + Options[option_internal[j]] = option; + } + + // now create one item per suite/section + string Suite = Tags.FindS("Suites"); + Suite = SubstVar(Suite,"$(ARCH)",_config->Find("APT::Architecture")); + string const Section = Tags.FindS("Sections"); + string URIS = Tags.FindS("URIs"); + + std::vector<std::string> list_uris = StringSplit(URIS, " "); + std::vector<std::string> list_dist = StringSplit(Suite, " "); + std::vector<std::string> list_section = StringSplit(Section, " "); + + for (std::vector<std::string>::const_iterator U = list_uris.begin(); + U != list_uris.end(); U++) + { + std::string URI = (*U); + if (!FixupURI(URI)) + { + _error->Error(_("Malformed stanza %u in source list %s (URI parse)"),i,Fd.Name().c_str()); + return false; + } + + for 
(std::vector<std::string>::const_iterator I = list_dist.begin(); + I != list_dist.end(); I++) + { + for (std::vector<std::string>::const_iterator J = list_section.begin(); + J != list_section.end(); J++) + { + if (CreateItem(List, URI, (*I), (*J), Options) == false) + { + return false; + } + } + } + } + return true; +} + // Type::ParseLine - Parse a single line /*{{{*/ // --------------------------------------------------------------------- /* This is a generic one that is the 'usual' format for sources.list @@ -159,7 +224,6 @@ bool pkgSourceList::Type::ParseLine(vector<metaIndex *> &List, return true; } /*}}}*/ - // SourceList::pkgSourceList - Constructors /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -181,7 +245,6 @@ pkgSourceList::~pkgSourceList() delete *I; } /*}}}*/ - /*}}}*/ // SourceList::ReadMainList - Read the main source list from etc /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -216,7 +279,6 @@ bool pkgSourceList::ReadMainList() return Res; } /*}}}*/ -// CNC:2003-03-03 - Needed to preserve backwards compatibility. // SourceList::Reset - Clear the sourcelist contents /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -227,7 +289,6 @@ void pkgSourceList::Reset() SrcList.erase(SrcList.begin(),SrcList.end()); } /*}}}*/ -// CNC:2003-03-03 - Function moved to ReadAppend() and Reset(). // SourceList::Read - Parse the sourcelist file /*{{{*/ // --------------------------------------------------------------------- /* */ @@ -242,16 +303,28 @@ bool pkgSourceList::Read(string File) /* */ bool pkgSourceList::ReadAppend(string File) { + if (_config->FindB("APT::Sources::Use-Deb822", false) == true) + { + int lines_parsed =ParseFileDeb822(File); + if (lines_parsed < 0) + return false; + else if (lines_parsed > 0) + return true; + // no lines parsed ... fall through and use old style parser + } + return ParseFileOldStyle(File); +} + +// SourceList::ReadFileOldStyle - Read Traditional style sources.list /*{{{*/ +// --------------------------------------------------------------------- +/* */ +bool pkgSourceList::ParseFileOldStyle(string File) +{ // Open the stream for reading ifstream F(File.c_str(),ios::in /*| ios::nocreate*/); if (!F != 0) return _error->Errno("ifstream::ifstream",_("Opening %s"),File.c_str()); - -#if 0 // Now Reset() does this. - for (const_iterator I = SrcList.begin(); I != SrcList.end(); I++) - delete *I; - SrcList.erase(SrcList.begin(),SrcList.end()); -#endif + // CNC:2003-12-10 - 300 is too short. 
   char Buffer[1024];
@@ -298,6 +371,54 @@ bool pkgSourceList::ReadAppend(string File)
    return true;
 }
 									/*}}}*/
+// SourceList::ParseFileDeb822 - Parse deb822 style sources.list	/*{{{*/
+// ---------------------------------------------------------------------
+/* Returns: the number of stanzas parsed*/
+int pkgSourceList::ParseFileDeb822(string File)
+{
+   pkgTagSection Tags;
+   unsigned int i=0;
+
+   // see if we can read the file
+   _error->PushToStack();
+   FileFd Fd(File, FileFd::ReadOnly);
+   pkgTagFile Sources(&Fd);
+   if (_error->PendingError() == true)
+   {
+      _error->RevertToStack();
+      return 0;
+   }
+   _error->MergeWithStack();
+
+   // read step by step
+   while (Sources.Step(Tags) == true)
+   {
+      if(!Tags.Exists("Types"))
+         continue;
+
+      string const types = Tags.FindS("Types");
+      std::vector<std::string> list_types = StringSplit(types, " ");
+      for (std::vector<std::string>::const_iterator I = list_types.begin();
+           I != list_types.end(); I++)
+      {
+         Type *Parse = Type::GetType((*I).c_str());
+         if (Parse == 0)
+         {
+            _error->Error(_("Type '%s' is not known on stanza %u in source list %s"), (*I).c_str(),i,Fd.Name().c_str());
+            return -1;
+         }
+
+         if (!Parse->ParseStanza(SrcList, Tags, i, Fd))
+            return -1;
+
+         i++;
+      }
+   }
+
+   // we are done, return the number of stanzas read
+   return i;
+}
+									/*}}}*/
 // SourceList::FindIndex - Get the index associated with a file	/*{{{*/
 // ---------------------------------------------------------------------
 /* */
diff --git a/apt-pkg/sourcelist.h b/apt-pkg/sourcelist.h
index 02e27101a..0ccb4aa00 100644
--- a/apt-pkg/sourcelist.h
+++ b/apt-pkg/sourcelist.h
@@ -31,6 +31,7 @@
 #include <vector>
 #include <map>
 #include <apt-pkg/pkgcache.h>
+#include <apt-pkg/tagfile.h>
 
 #ifndef APT_8_CLEANER_HEADERS
 #include <apt-pkg/metaindex.h>
@@ -60,6 +61,10 @@ class pkgSourceList
    const char *Label;
 
    bool FixupURI(std::string &URI) const;
+   virtual bool ParseStanza(std::vector<metaIndex *> &List,
+                            pkgTagSection &Tags,
+                            int stanza_n,
+                            FileFd &Fd);
    virtual bool ParseLine(std::vector<metaIndex *> &List,
                           const char *Buffer,
                           unsigned long const &CurLine,std::string const &File) const;
@@ -75,7 +80,10 @@ class pkgSourceList
    protected:
 
    std::vector<metaIndex *> SrcList;
-
+
+   int ParseFileDeb822(std::string File);
+   bool ParseFileOldStyle(std::string File);
+
    public:
 
    bool ReadMainList();
diff --git a/apt-pkg/tagfile.cc b/apt-pkg/tagfile.cc
index bef3c76ba..b92b2c15a 100644
--- a/apt-pkg/tagfile.cc
+++ b/apt-pkg/tagfile.cc
@@ -567,52 +567,54 @@ bool TFRewrite(FILE *Output,pkgTagSection const &Tags,const char *Order[],
    }
 
    // Write all all of the tags, in order.
-   for (unsigned int I = 0; Order[I] != 0; I++)
+   if (Order != NULL)
    {
-      bool Rewritten = false;
-
-      // See if this is a field that needs to be rewritten
-      for (unsigned int J = 0; Rewrite != 0 && Rewrite[J].Tag != 0; J++)
+      for (unsigned int I = 0; Order[I] != 0; I++)
       {
-         if (strcasecmp(Rewrite[J].Tag,Order[I]) == 0)
-         {
-            Visited[J] |= 2;
-            if (Rewrite[J].Rewrite != 0 && Rewrite[J].Rewrite[0] != 0)
-            {
-               if (isspace(Rewrite[J].Rewrite[0]))
-                  fprintf(Output,"%s:%s\n",Rewrite[J].NewTag,Rewrite[J].Rewrite);
-               else
-                  fprintf(Output,"%s: %s\n",Rewrite[J].NewTag,Rewrite[J].Rewrite);
-            }
-
-            Rewritten = true;
-            break;
-         }
-      }
+         bool Rewritten = false;
+
+         // See if this is a field that needs to be rewritten
+         for (unsigned int J = 0; Rewrite != 0 && Rewrite[J].Tag != 0; J++)
+         {
+            if (strcasecmp(Rewrite[J].Tag,Order[I]) == 0)
+            {
+               Visited[J] |= 2;
+               if (Rewrite[J].Rewrite != 0 && Rewrite[J].Rewrite[0] != 0)
+               {
+                  if (isspace(Rewrite[J].Rewrite[0]))
+                     fprintf(Output,"%s:%s\n",Rewrite[J].NewTag,Rewrite[J].Rewrite);
+                  else
+                     fprintf(Output,"%s: %s\n",Rewrite[J].NewTag,Rewrite[J].Rewrite);
+               }
+               Rewritten = true;
+               break;
+            }
+         }
 
-      // See if it is in the fragment
-      unsigned Pos;
-      if (Tags.Find(Order[I],Pos) == false)
-         continue;
-      Visited[Pos] |= 1;
-
-      if (Rewritten == true)
-         continue;
+         // See if it is in the fragment
+         unsigned Pos;
+         if (Tags.Find(Order[I],Pos) == false)
+            continue;
+         Visited[Pos] |= 1;
+
+         if (Rewritten == true)
+            continue;
 
-      /* Write out this element, taking a moment to rewrite the tag
-         in case of changes of case. */
-      const char *Start;
-      const char *Stop;
-      Tags.Get(Start,Stop,Pos);
+         /* Write out this element, taking a moment to rewrite the tag
+            in case of changes of case. */
+         const char *Start;
+         const char *Stop;
+         Tags.Get(Start,Stop,Pos);
 
-      if (fputs(Order[I],Output) < 0)
-         return _error->Errno("fputs","IO Error to output");
-      Start += strlen(Order[I]);
-      if (fwrite(Start,Stop - Start,1,Output) != 1)
-         return _error->Errno("fwrite","IO Error to output");
-      if (Stop[-1] != '\n')
-         fprintf(Output,"\n");
-   }
+         if (fputs(Order[I],Output) < 0)
+            return _error->Errno("fputs","IO Error to output");
+         Start += strlen(Order[I]);
+         if (fwrite(Start,Stop - Start,1,Output) != 1)
+            return _error->Errno("fwrite","IO Error to output");
+         if (Stop[-1] != '\n')
+            fprintf(Output,"\n");
+      }
+   }
 
    // Now write all the old tags that were missed.
    for (unsigned int I = 0; I != Tags.Count(); I++)
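
The acquire-item.cc hunks above make pkgAcqDiffIndex choose between the old one-by-one patch items (pkgAcqIndexDiffs) and the new batched client-side merge items (pkgAcqIndexMergeDiffs), based on the Acquire::PDiffs::Merge setting and the X-Patch-Precedence field that reprepro sets when it already merges patches on the server. A minimal standalone sketch of that decision follows; the struct and function names are invented for illustration and are not part of the libapt-pkg API.

    #include <iostream>
    #include <string>

    // Hypothetical stand-ins for the config value and the Packages.diff/Index field.
    struct StanzaFields { std::string patch_precedence; };

    // Mirrors the new logic: client-side merging is used unless it is disabled in
    // the configuration or the server advertises already-merged patches.
    bool UseClientSideMerge(bool acquire_pdiffs_merge, StanzaFields const &tags)
    {
       if (acquire_pdiffs_merge == false)
          return false;
       // reprepro sets "X-Patch-Precedence: merged" when it merges patches itself
       return tags.patch_precedence != "merged";
    }

    int main()
    {
       StanzaFields reprepro{ "merged" }, plain{ "" };
       std::cout << UseClientSideMerge(true, plain)     // 1: batch download, one rred call
                 << UseClientSideMerge(true, reprepro)  // 0: sequential pkgAcqIndexDiffs path
                 << UseClientSideMerge(false, plain)    // 0: disabled via Acquire::PDiffs::Merge
                 << std::endl;
       return 0;
    }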
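Each pkgAcqIndexMergeDiffs item shares a vector with all sibling patch items: Done() marks its own state, and only the item that finishes last switches to StateApplyDiff and hands the whole batch to the rred method, while the first Failed() triggers a fallback to a full index download. A compact sketch of that last-finisher pattern, again with invented names and without the acquire machinery.

    #include <iostream>
    #include <vector>

    // Simplified stand-in for the per-patch state machine in pkgAcqIndexMergeDiffs.
    enum DiffState { StateFetchDiff, StateDoneDiff, StateApplyDiff, StateErrorDiff };

    struct PatchItem
    {
       DiffState State = StateFetchDiff;
       std::vector<PatchItem*> const *allPatches = nullptr;

       // Called when this patch finished downloading.
       void Done()
       {
          State = StateDoneDiff;
          for (PatchItem const *I : *allPatches)
             if (I->State != StateDoneDiff)
                return;            // a sibling is still fetching, nothing to do yet
          State = StateApplyDiff;  // last one done: hand everything to the patcher
          std::cout << "all patches fetched, applying in one go\n";
       }
    };

    int main()
    {
       std::vector<PatchItem*> all{ new PatchItem, new PatchItem };
       for (PatchItem *I : all) I->allPatches = &all;
       all[0]->Done();   // not the last one, stays quiet
       all[1]->Done();   // triggers the apply step
       for (PatchItem *I : all) delete I;
       return 0;
    }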
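The new ParseStanza/ParseFileDeb822 path in sourcelist.cc reads deb822-style stanzas (enabled with APT::Sources::Use-Deb822), maps option names such as Architectures onto the old short keys (arch), converts their space-separated values to comma-separated ones, and creates one entry per URI/suite/section combination. A standalone sketch of that expansion using only the standard library; the stanza values below are made up.

    #include <algorithm>
    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    // Split a space-separated deb822 value into its words.
    static std::vector<std::string> Split(std::string const &value)
    {
       std::istringstream ss(value);
       std::vector<std::string> words;
       for (std::string w; ss >> w; )
          words.push_back(w);
       return words;
    }

    int main()
    {
       // A made-up stanza, roughly what the deb822 parser would hand to ParseStanza.
       std::string const uris = "http://archive.ubuntu.com/ubuntu";
       std::string const suites = "trusty trusty-updates";
       std::string const sections = "main universe";
       std::string architectures = "amd64 i386";

       // deb822 uses spaces, the old backend expects commas ("arch" becomes "amd64,i386").
       std::replace(architectures.begin(), architectures.end(), ' ', ',');

       // One item per URI x suite x section, like ParseStanza's nested loops.
       for (std::string const &u : Split(uris))
          for (std::string const &d : Split(suites))
             for (std::string const &s : Split(sections))
                std::cout << "deb [arch=" << architectures << "] "
                          << u << " " << d << " " << s << "\n";
       return 0;
    }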
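The dpkgpm.cc hunks move the Dpkg::Use-Pty check to the top of StartPtyMagic(), so no pty is allocated at all when the option is off, and they stop ignoring the return values of ioctl(TIOCGWINSZ) and ioctl(TIOCSCTTY). A reduced standalone sketch of that guarded openpty sequence (link with -lutil); it reports errors to stderr instead of APT's _error stack.

    #include <cstdio>
    #include <pty.h>        // openpty(), link with -lutil
    #include <sys/ioctl.h>
    #include <termios.h>
    #include <unistd.h>

    // Allocate a pty for the child, or leave -1/-1 when ptys are not wanted or unavailable.
    static void StartPty(bool use_pty, int &master, int &slave)
    {
       master = slave = -1;
       if (!use_pty)                      // Dpkg::Use-Pty "false": no pty at all
          return;

       struct termios tt;
       struct winsize win;
       if (tcgetattr(STDOUT_FILENO, &tt) != 0)
          return;                         // stdout is not a terminal, keep plain pipes
       if (ioctl(STDOUT_FILENO, TIOCGWINSZ, &win) < 0)
       {
          perror("ioctl(TIOCGWINSZ)");    // report instead of silently ignoring
          return;
       }
       if (openpty(&master, &slave, NULL, &tt, &win) < 0)
       {
          perror("openpty");              // e.g. /dev/pts not mounted
          master = slave = -1;
       }
    }

    int main()
    {
       int master, slave;
       StartPty(true, master, slave);
       std::printf("master=%d slave=%d\n", master, slave);
       if (master >= 0) { close(master); close(slave); }
       return 0;
    }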
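The tagfile.cc change only wraps the ordered-output loop of TFRewrite() in an if (Order != NULL) guard, so a caller may pass no ordering array and still get the pass that writes the remaining tags. A tiny sketch of the same optional-ordering pattern with invented names.

    #include <cstdio>
    #include <cstring>

    // Print fields in the caller's preferred order first (if any), then the rest.
    // Assumes no more than 16 fields, purely to keep the sketch short.
    static void WriteFields(const char *fields[], unsigned count, const char *order[])
    {
       bool written[16] = { false };
       if (order != NULL)                       // the ordering array is optional now
          for (unsigned i = 0; order[i] != NULL; ++i)
             for (unsigned j = 0; j < count; ++j)
                if (!written[j] && std::strcmp(fields[j], order[i]) == 0)
                {
                   std::printf("%s\n", fields[j]);
                   written[j] = true;
                }
       for (unsigned j = 0; j < count; ++j)     // everything the order list missed
          if (!written[j])
             std::printf("%s\n", fields[j]);
    }

    int main()
    {
       const char *fields[] = { "Suites", "Types", "URIs" };
       const char *order[]  = { "Types", "URIs", NULL };
       WriteFields(fields, 3, order);   // ordered pass plus leftovers
       WriteFields(fields, 3, NULL);    // NULL order: plain pass-through
       return 0;
    }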