author    David Kalnischkies <david@kalnischkies.de>    2014-09-29 22:45:52 +0200
committer David Kalnischkies <david@kalnischkies.de>    2014-09-30 01:53:47 +0200
commit    f6d4ab9ad8a2cfe52737ab620dd252cf8ceec43d (patch)
tree      282df83623c80846711fba7df748a69131d5b864
parent    3809194b662f48733916e6248cd0c141f281313d (diff)
support parsing of all hashes for pdiff
The file format of a pdiff index currently stores only SHA1 hashes. With this change we look for all other hashes we support as well and take what we get, so that after the release of jessie we are able to get rid of SHA1 if we want to. Note that the completely patched file is and was checked against the hashes collected from the Release file, so this transition isn't mission critical.
-rw-r--r--   apt-pkg/acquire-item.cc             | 410
-rw-r--r--   apt-pkg/acquire-item.h              |  22
-rw-r--r--   apt-pkg/contrib/hashes.cc           |   4
-rw-r--r--   debian/libapt-pkg4.13.symbols       |   3
-rwxr-xr-x   test/integration/test-pdiff-usage   |  48
5 files changed, 328 insertions(+), 159 deletions(-)
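
For orientation: the heart of the change is that ParseDiffIndex no longer hard-codes the SHA1-Current/SHA1-History/SHA1-Patches fields but iterates over every supported hash type and keeps whatever the index offers. A minimal standalone sketch of that collection step for the "<HASH>-Current" field follows; the stanza contents and the supported-type list are illustrative placeholders, and the real code uses HashString::SupportedHashes() and HashStringList as shown in the diff below.

// Standalone sketch (not APT code): gather every "<HASH>-Current" entry a
// pdiff Index stanza offers, instead of looking only at SHA1-Current.
#include <iostream>
#include <map>
#include <sstream>
#include <string>
#include <utility>
#include <vector>

int main() {
   // hypothetical stanza; real indexes carry SHA1-Current, SHA256-Current, ...
   std::map<std::string, std::string> stanza = {
      {"SHA1-Current",   "adc83b19e793491b1c6ea0fd8b46cd9f32e592fc 33053002"},
      {"SHA256-Current", "01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b 33053002"},
   };
   std::vector<std::string> const supported = {"MD5Sum", "SHA1", "SHA256", "SHA512"};

   std::vector<std::pair<std::string, std::string>> serverHashes;
   unsigned long long serverSize = 0;
   for (auto const &type : supported) {
      auto const field = stanza.find(type + "-Current");
      if (field == stanza.end())
         continue;                            // this hash type isn't offered
      std::string hash;
      unsigned long long size = 0;
      std::stringstream ss(field->second);
      ss >> hash >> size;
      if (hash.empty() || (serverSize != 0 && serverSize != size))
         continue;                            // malformed or inconsistent entry
      serverHashes.emplace_back(type, hash);  // take what we get
      serverSize = size;
   }
   for (auto const &h : serverHashes)
      std::cout << h.first << ": " << h.second << " (" << serverSize << " bytes)\n";
}

The commit repeats the same loop shape for the -History and -Patches fields, merging the per-type results into one DiffInfo record per patch file.
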
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index f46c8a6e4..779f828d3 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -392,9 +392,7 @@ pkgAcqDiffIndex::pkgAcqDiffIndex(pkgAcquire *Owner,
Desc.URI.substr(0,strlen("file:/")) == "file:/")
{
// we don't have a pkg file or we don't want to queue
- if(Debug)
- std::clog << "No index file, local or canceld by user" << std::endl;
- Failed("", NULL);
+ Failed("No index file, local or canceld by user", NULL);
return;
}
@@ -426,156 +424,264 @@ string pkgAcqDiffIndex::Custom600Headers() const
/*}}}*/
bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/
{
+ // failing here is fine: our caller will take care of trying to
+ // get the complete file if patching fails
if(Debug)
std::clog << "pkgAcqDiffIndex::ParseIndexDiff() " << IndexDiffFile
<< std::endl;
- pkgTagSection Tags;
- string ServerSha1;
- vector<DiffInfo> available_patches;
-
FileFd Fd(IndexDiffFile,FileFd::ReadOnly);
pkgTagFile TF(&Fd);
if (_error->PendingError() == true)
return false;
- if(TF.Step(Tags) == true)
+ pkgTagSection Tags;
+ if(unlikely(TF.Step(Tags) == false))
+ return false;
+
+ HashStringList ServerHashes;
+ unsigned long long ServerSize = 0;
+
+ for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
{
- bool found = false;
- DiffInfo d;
- string size;
+ std::string tagname = *type;
+ tagname.append("-Current");
+ std::string const tmp = Tags.FindS(tagname.c_str());
+ if (tmp.empty() == true)
+ continue;
- string const tmp = Tags.FindS("SHA1-Current");
+ string hash;
+ unsigned long long size;
std::stringstream ss(tmp);
- ss >> ServerSha1 >> size;
- unsigned long const ServerSize = atol(size.c_str());
+ ss >> hash >> size;
+ if (unlikely(hash.empty() == true))
+ continue;
+ if (unlikely(ServerSize != 0 && ServerSize != size))
+ continue;
+ ServerHashes.push_back(HashString(*type, hash));
+ ServerSize = size;
+ }
- FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
- SHA1Summation SHA1;
- SHA1.AddFD(fd);
- string const local_sha1 = SHA1.Result();
+ if (ServerHashes.usable() == false)
+ {
+ if (Debug == true)
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Did not find a good hashsum in the index" << std::endl;
+ return false;
+ }
- if(local_sha1 == ServerSha1)
- {
- // we have the same sha1 as the server so we are done here
- if(Debug)
- std::clog << "Package file is up-to-date" << std::endl;
- // list cleanup needs to know that this file as well as the already
- // present index is ours, so we create an empty diff to save it for us
- new pkgAcqIndexDiffs(Owner, Target, ExpectedHashes, MetaIndexParser,
- ServerSha1, available_patches);
- return true;
- }
- else
+ if (ServerHashes != HashSums())
+ {
+ if (Debug == true)
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": Index has different hashes than parser, probably older, so fail pdiffing" << std::endl;
+ return false;
+ }
+
+ if (ServerHashes.VerifyFile(CurrentPackagesFile) == true)
+ {
+ // we have the same sha1 as the server so we are done here
+ if(Debug)
+ std::clog << "pkgAcqDiffIndex: Package file is up-to-date" << std::endl;
+ // list cleanup needs to know that this file as well as the already
+ // present index is ours, so we create an empty diff to save it for us
+ new pkgAcqIndexDiffs(Owner, Target, ExpectedHashes, MetaIndexParser);
+ return true;
+ }
+
+ FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
+ Hashes LocalHashesCalc;
+ LocalHashesCalc.AddFD(fd);
+ HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
+
+ if(Debug)
+ std::clog << "Server-Current: " << ServerHashes.find(NULL)->toStr() << " and we start at "
+ << fd.Name() << " " << fd.FileSize() << " " << LocalHashes.find(NULL)->toStr() << std::endl;
+
+ // parse all of (provided) history
+ vector<DiffInfo> available_patches;
+ bool firstAcceptedHashes = true;
+ for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
+ {
+ if (LocalHashes.find(*type) == NULL)
+ continue;
+
+ std::string tagname = *type;
+ tagname.append("-History");
+ std::string const tmp = Tags.FindS(tagname.c_str());
+ if (tmp.empty() == true)
+ continue;
+
+ string hash, filename;
+ unsigned long long size;
+ std::stringstream ss(tmp);
+
+ while (ss >> hash >> size >> filename)
{
- if(Debug)
- std::clog << "SHA1-Current: " << ServerSha1 << " and we start at "<< fd.Name() << " " << fd.Size() << " " << local_sha1 << std::endl;
+ if (unlikely(hash.empty() == true || filename.empty() == true))
+ continue;
- // check the historie and see what patches we need
- string const history = Tags.FindS("SHA1-History");
- std::stringstream hist(history);
- while(hist >> d.sha1 >> size >> d.file)
+ // see if we have a record for this file already
+ std::vector<DiffInfo>::iterator cur = available_patches.begin();
+ for (; cur != available_patches.end(); ++cur)
{
- // read until the first match is found
- // from that point on, we probably need all diffs
- if(d.sha1 == local_sha1)
- found=true;
- else if (found == false)
+ if (cur->file != filename || unlikely(cur->result_size != size))
continue;
-
- if(Debug)
- std::clog << "Need to get diff: " << d.file << std::endl;
- available_patches.push_back(d);
+ cur->result_hashes.push_back(HashString(*type, hash));
+ break;
}
-
- if (available_patches.empty() == false)
+ if (cur != available_patches.end())
+ continue;
+ if (firstAcceptedHashes == true)
{
- // patching with too many files is rather slow compared to a fast download
- unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
- if (fileLimit != 0 && fileLimit < available_patches.size())
- {
- if (Debug)
- std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
- << ") so fallback to complete download" << std::endl;
- return false;
- }
-
- // see if the patches are too big
- found = false; // it was true and it will be true again at the end
- d = *available_patches.begin();
- string const firstPatch = d.file;
- unsigned long patchesSize = 0;
- std::stringstream patches(Tags.FindS("SHA1-Patches"));
- while(patches >> d.sha1 >> size >> d.file)
- {
- if (firstPatch == d.file)
- found = true;
- else if (found == false)
- continue;
-
- patchesSize += atol(size.c_str());
- }
- unsigned long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
- if (sizeLimit > 0 && (sizeLimit/100) < patchesSize)
- {
- if (Debug)
- std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
- << ") so fallback to complete download" << std::endl;
- return false;
- }
+ DiffInfo next;
+ next.file = filename;
+ next.result_hashes.push_back(HashString(*type, hash));
+ next.result_size = size;
+ next.patch_size = 0;
+ available_patches.push_back(next);
+ }
+ else
+ {
+ if (Debug == true)
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
+ << " wasn't in the list for the first parsed hash! (history)" << std::endl;
+ break;
}
}
+ firstAcceptedHashes = false;
+ }
+
+ if (unlikely(available_patches.empty() == true))
+ {
+ if (Debug)
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
+ << "Couldn't find any patches for the patch series." << std::endl;
+ return false;
+ }
+
+ for (char const * const * type = HashString::SupportedHashes(); *type != NULL; ++type)
+ {
+ if (LocalHashes.find(*type) == NULL)
+ continue;
- // we have something, queue the next diff
- if(found)
+ std::string tagname = *type;
+ tagname.append("-Patches");
+ std::string const tmp = Tags.FindS(tagname.c_str());
+ if (tmp.empty() == true)
+ continue;
+
+ string hash, filename;
+ unsigned long long size;
+ std::stringstream ss(tmp);
+
+ while (ss >> hash >> size >> filename)
{
- // queue the diffs
- string::size_type const last_space = Description.rfind(" ");
- if(last_space != string::npos)
- Description.erase(last_space, Description.size()-last_space);
-
- /* decide if we should download patches one by one or in one go:
- The first is good if the server merges patches, but many don't so client
- based merging can be attempt in which case the second is better.
- "bad things" will happen if patches are merged on the server,
- but client side merging is attempt as well */
- bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
- if (pdiff_merge == true)
- {
- // reprepro adds this flag if it has merged patches on the server
- std::string const precedence = Tags.FindS("X-Patch-Precedence");
- pdiff_merge = (precedence != "merged");
- }
+ if (unlikely(hash.empty() == true || filename.empty() == true))
+ continue;
- if (pdiff_merge == false)
- {
- new pkgAcqIndexDiffs(Owner, Target, ExpectedHashes, MetaIndexParser,
- ServerSha1, available_patches);
- }
- else
+ // see if we have a record for this file already
+ std::vector<DiffInfo>::iterator cur = available_patches.begin();
+ for (; cur != available_patches.end(); ++cur)
{
- std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
- for(size_t i = 0; i < available_patches.size(); ++i)
- (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, Target,
- ExpectedHashes,
- MetaIndexParser,
- available_patches[i],
- diffs);
+ if (cur->file != filename)
+ continue;
+ if (unlikely(cur->patch_size != 0 && cur->patch_size != size))
+ continue;
+ cur->patch_hashes.push_back(HashString(*type, hash));
+ cur->patch_size = size;
+ break;
}
-
- Complete = false;
- Status = StatDone;
- Dequeue();
- return true;
+ if (cur != available_patches.end())
+ continue;
+ if (Debug == true)
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": File " << filename
+ << " wasn't in the list for the first parsed hash! (patches)" << std::endl;
+ break;
}
}
-
- // Nothing found, report and return false
- // Failing here is ok, if we return false later, the full
- // IndexFile is queued
- if(Debug)
- std::clog << "Can't find a patch in the index file" << std::endl;
- return false;
+
+ bool foundStart = false;
+ for (std::vector<DiffInfo>::iterator cur = available_patches.begin();
+ cur != available_patches.end(); ++cur)
+ {
+ if (LocalHashes != cur->result_hashes)
+ continue;
+
+ available_patches.erase(available_patches.begin(), cur);
+ foundStart = true;
+ break;
+ }
+
+ if (foundStart == false || unlikely(available_patches.empty() == true))
+ {
+ if (Debug)
+ std::clog << "pkgAcqDiffIndex: " << IndexDiffFile << ": "
+ << "Couldn't find the start of the patch series." << std::endl;
+ return false;
+ }
+
+ // patching with too many files is rather slow compared to a fast download
+ unsigned long const fileLimit = _config->FindI("Acquire::PDiffs::FileLimit", 0);
+ if (fileLimit != 0 && fileLimit < available_patches.size())
+ {
+ if (Debug)
+ std::clog << "Need " << available_patches.size() << " diffs (Limit is " << fileLimit
+ << ") so fallback to complete download" << std::endl;
+ return false;
+ }
+
+ // calculate the size of all patches we have to get
+ // note that all sizes are uncompressed, while we download compressed files
+ unsigned long long patchesSize = 0;
+ for (std::vector<DiffInfo>::const_iterator cur = available_patches.begin();
+ cur != available_patches.end(); ++cur)
+ patchesSize += cur->patch_size;
+ unsigned long long const sizeLimit = ServerSize * _config->FindI("Acquire::PDiffs::SizeLimit", 100);
+ if (false && sizeLimit > 0 && (sizeLimit/100) < patchesSize)
+ {
+ if (Debug)
+ std::clog << "Need " << patchesSize << " bytes (Limit is " << sizeLimit/100
+ << ") so fallback to complete download" << std::endl;
+ return false;
+ }
+
+ // we have something, queue the diffs
+ string::size_type const last_space = Description.rfind(" ");
+ if(last_space != string::npos)
+ Description.erase(last_space, Description.size()-last_space);
+
+ /* decide if we should download patches one by one or in one go:
+ The first is good if the server merges patches, but many don't so client
+ based merging can be attempt in which case the second is better.
+ "bad things" will happen if patches are merged on the server,
+ but client side merging is attempt as well */
+ bool pdiff_merge = _config->FindB("Acquire::PDiffs::Merge", true);
+ if (pdiff_merge == true)
+ {
+ // reprepro adds this flag if it has merged patches on the server
+ std::string const precedence = Tags.FindS("X-Patch-Precedence");
+ pdiff_merge = (precedence != "merged");
+ }
+
+ if (pdiff_merge == false)
+ {
+ new pkgAcqIndexDiffs(Owner, Target, ExpectedHashes, MetaIndexParser,
+ available_patches);
+ }
+ else
+ {
+ std::vector<pkgAcqIndexMergeDiffs*> *diffs = new std::vector<pkgAcqIndexMergeDiffs*>(available_patches.size());
+ for(size_t i = 0; i < available_patches.size(); ++i)
+ (*diffs)[i] = new pkgAcqIndexMergeDiffs(Owner, Target,
+ ExpectedHashes,
+ MetaIndexParser,
+ available_patches[i],
+ diffs);
+ }
+
+ Complete = false;
+ Status = StatDone;
+ Dequeue();
+ return true;
}
/*}}}*/
void pkgAcqDiffIndex::Failed(string Message,pkgAcquire::MethodConfig * /*Cnf*/)/*{{{*/
@@ -613,7 +719,7 @@ void pkgAcqDiffIndex::Done(string Message,unsigned long long Size,HashStringList
DestFile = FinalFile;
if(!ParseDiffIndex(DestFile))
- return Failed("", NULL);
+ return Failed("Parsing pdiff Index failed", NULL);
Complete = true;
Status = StatDone;
@@ -630,12 +736,10 @@ pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire *Owner,
struct IndexTarget const * const Target,
HashStringList const &ExpectedHashes,
indexRecords *MetaIndexParser,
- string ServerSha1,
vector<DiffInfo> diffs)
: pkgAcqBaseIndex(Owner, Target, ExpectedHashes, MetaIndexParser),
- available_patches(diffs), ServerSha1(ServerSha1)
+ available_patches(diffs)
{
-
DestFile = _config->FindDir("Dir::State::lists") + "partial/";
DestFile += URItoFileName(Target->URI);
@@ -710,15 +814,21 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
FinalFile += URItoFileName(RealURI);
FileFd fd(FinalFile, FileFd::ReadOnly);
- SHA1Summation SHA1;
- SHA1.AddFD(fd);
- string local_sha1 = string(SHA1.Result());
+ Hashes LocalHashesCalc;
+ LocalHashesCalc.AddFD(fd);
+ HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
+
if(Debug)
- std::clog << "QueueNextDiff: "
- << FinalFile << " (" << local_sha1 << ")"<<std::endl;
+ std::clog << "QueueNextDiff: " << FinalFile << " (" << LocalHashes.find(NULL)->toStr() << ")" << std::endl;
+
+ if (unlikely(LocalHashes.usable() == false || ExpectedHashes.usable() == false))
+ {
+ Failed("Local/Expected hashes are not usable", NULL);
+ return false;
+ }
// final file reached before all patches are applied
- if(local_sha1 == ServerSha1)
+ if(LocalHashes == ExpectedHashes)
{
Finish(true);
return true;
@@ -726,10 +836,10 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
// remove all patches until the next matching patch is found
// this requires the Index file to be ordered
- for(vector<DiffInfo>::iterator I=available_patches.begin();
+ for(vector<DiffInfo>::iterator I = available_patches.begin();
available_patches.empty() == false &&
I != available_patches.end() &&
- I->sha1 != local_sha1;
+ I->result_hashes != LocalHashes;
++I)
{
available_patches.erase(I);
@@ -738,7 +848,7 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
// error checking and falling back if no patch was found
if(available_patches.empty() == true)
{
- Failed("", NULL);
+ Failed("No patches left to reach target", NULL);
return false;
}
@@ -750,7 +860,7 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
if(Debug)
std::clog << "pkgAcqIndexDiffs::QueueNextDiff(): " << Desc.URI << std::endl;
-
+
QueueURI(Desc);
return true;
@@ -770,6 +880,17 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringLi
// success in downloading a diff, enter ApplyDiff state
if(State == StateFetchDiff)
{
+ FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip);
+ class Hashes LocalHashesCalc;
+ LocalHashesCalc.AddFD(fd);
+ HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
+
+ if (fd.Size() != available_patches[0].patch_size ||
+ available_patches[0].patch_hashes != LocalHashes)
+ {
+ Failed("Patch has Size/Hashsum mismatch", NULL);
+ return;
+ }
// rred excepts the patch as $FinalFile.ed
Rename(DestFile,FinalFile+".ed");
@@ -814,7 +935,7 @@ void pkgAcqIndexDiffs::Done(string Message,unsigned long long Size, HashStringLi
if(available_patches.empty() == false) {
new pkgAcqIndexDiffs(Owner, Target,
ExpectedHashes, MetaIndexParser,
- ServerSha1, available_patches);
+ available_patches);
return Finish();
} else
return Finish(true);
@@ -886,6 +1007,17 @@ void pkgAcqIndexMergeDiffs::Done(string Message,unsigned long long Size,HashStri
if (State == StateFetchDiff)
{
+ FileFd fd(DestFile, FileFd::ReadOnly, FileFd::Gzip);
+ class Hashes LocalHashesCalc;
+ LocalHashesCalc.AddFD(fd);
+ HashStringList const LocalHashes = LocalHashesCalc.GetHashStringList();
+
+ if (fd.Size() != patch.patch_size || patch.patch_hashes != LocalHashes)
+ {
+ Failed("Patch has Size/Hashsum mismatch", NULL);
+ return;
+ }
+
// rred expects the patch as $FinalFile.ed.$patchname.gz
Rename(DestFile, FinalFile + ".ed." + patch.file + ".gz");
diff --git a/apt-pkg/acquire-item.h b/apt-pkg/acquire-item.h
index 513c516cd..18d72ca40 100644
--- a/apt-pkg/acquire-item.h
+++ b/apt-pkg/acquire-item.h
@@ -328,11 +328,17 @@ struct DiffInfo {
/** The filename of the diff. */
std::string file;
- /** The sha1 hash of the diff. */
- std::string sha1;
+ /** The hashes of the diff */
+ HashStringList result_hashes;
- /** The size of the diff. */
- unsigned long size;
+ /** The hashes of the file after the diff is applied */
+ HashStringList patch_hashes;
+
+ /** The size of the file after the diff is applied */
+ unsigned long long result_size;
+
+ /** The size of the diff itself */
+ unsigned long long patch_size;
};
/*}}}*/
/** \brief An item that is responsible for fetching a SubIndex {{{
@@ -616,9 +622,6 @@ class pkgAcqIndexDiffs : public pkgAcqBaseIndex
*/
std::vector<DiffInfo> available_patches;
- /** Stop applying patches when reaching that sha1 */
- std::string ServerSha1;
-
/** The current status of this patch. */
enum DiffState
{
@@ -662,12 +665,10 @@ class pkgAcqIndexDiffs : public pkgAcqBaseIndex
*
* \param ShortDesc A brief description of this item.
*
- * \param ExpectedHashes The expected md5sum of the completely
+ * \param ExpectedHashes The expected hashsums of the completely
* reconstructed package index file; the index file will be tested
* against this value when it is entirely reconstructed.
*
- * \param ServerSha1 is the sha1sum of the current file on the server
- *
* \param diffs The remaining diffs from the index of diffs. They
* should be ordered so that each diff appears before any diff
* that depends on it.
@@ -676,7 +677,6 @@ class pkgAcqIndexDiffs : public pkgAcqBaseIndex
struct IndexTarget const * const Target,
HashStringList const &ExpectedHash,
indexRecords *MetaIndexParser,
- std::string ServerSha1,
std::vector<DiffInfo> diffs=std::vector<DiffInfo>());
};
/*}}}*/
diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc
index 199e395f6..417982343 100644
--- a/apt-pkg/contrib/hashes.cc
+++ b/apt-pkg/contrib/hashes.cc
@@ -209,11 +209,11 @@ bool HashStringList::operator==(HashStringList const &other) const /*{{{*/
std::string const forcedType = _config->Find("Acquire::ForceHash", "");
if (forcedType.empty() == false)
{
- HashString const * const hs = other.find(forcedType);
+ HashString const * const hs = find(forcedType);
HashString const * const ohs = other.find(forcedType);
if (hs == NULL || ohs == NULL)
return false;
- return hs == ohs;
+ return *hs == *ohs;
}
short matches = 0;
for (const_iterator hs = begin(); hs != end(); ++hs)
diff --git a/debian/libapt-pkg4.13.symbols b/debian/libapt-pkg4.13.symbols
index 269406a2c..0f5ce5735 100644
--- a/debian/libapt-pkg4.13.symbols
+++ b/debian/libapt-pkg4.13.symbols
@@ -1636,7 +1636,6 @@ libapt-pkg.so.4.13 libapt-pkg4.13 #MINVER#
(c++)"pkgAcqFile::pkgAcqFile(pkgAcquire*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, HashStringList const&, unsigned long long, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)@Base" 1.1~exp1
(c++)"pkgAcqIndex::Custom600Headers() const@Base" 1.1~exp1
(c++)"pkgAcqIndexDiffs::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
- (c++)"pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire*, IndexTarget const*, HashStringList const&, indexRecords*, std::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::vector<DiffInfo, std::allocator<DiffInfo> >)@Base" 1.1~exp1
(c++)"pkgAcqIndex::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
(c++)"pkgAcqIndex::InitByHashIfNeeded(std::basic_string<char, std::char_traits<char>, std::allocator<char> >)@Base" 1.1~exp1
(c++)"pkgAcqIndexMergeDiffs::Done(std::basic_string<char, std::char_traits<char>, std::allocator<char> >, unsigned long long, HashStringList const&, pkgAcquire::MethodConfig*)@Base" 1.1~exp1
@@ -1668,6 +1667,8 @@ libapt-pkg.so.4.13 libapt-pkg4.13 #MINVER#
(c++)"typeinfo for pkgAcqBaseIndex@Base" 1.1~exp1
(c++)"typeinfo name for pkgAcqBaseIndex@Base" 1.1~exp1
(c++)"vtable for pkgAcqBaseIndex@Base" 1.1~exp1
+ (c++)"DiffInfo::DiffInfo(DiffInfo const&)@Base" 1.1~exp4
+ (c++)"pkgAcqIndexDiffs::pkgAcqIndexDiffs(pkgAcquire*, IndexTarget const*, HashStringList const&, indexRecords*, std::vector<DiffInfo, std::allocator<DiffInfo> >)@Base" 1.1~exp4
### mixed stuff
(c++)"GetListOfFilesInDir(std::basic_string<char, std::char_traits<char>, std::allocator<char> > const&, bool)@Base" 0.8.16~exp13
(c++)"pkgCache::DepIterator::IsIgnorable(pkgCache::PkgIterator const&) const@Base" 0.8.16~exp10
diff --git a/test/integration/test-pdiff-usage b/test/integration/test-pdiff-usage
index 74749d6ab..0d5261429 100755
--- a/test/integration/test-pdiff-usage
+++ b/test/integration/test-pdiff-usage
@@ -76,8 +76,15 @@ SHA1-History:
9f4148e06d7faa37062994ff10d0c842d7017513 33053002 2010-08-18-2013.28
$(sha1sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
SHA1-Patches:
- 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-0814.28
- $(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX
+ 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-2013.28
+ $(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)
+SHA256-Current: $(sha256sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new)
+SHA256-History:
+ 01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b 33053002 2010-08-18-2013.28
+ $(sha256sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
+SHA256-Patches:
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 19722 2010-08-18-2013.28
+ $(sha256sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX
generatereleasefiles '+1hour'
signreleasefiles
find aptarchive -name 'Packages*' -type f -delete
@@ -119,9 +126,18 @@ SHA1-History:
$(sha1sum ${PKGFILE} | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}) $(basename ${PATCHFILE})
$(sha1sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new) $(basename ${PATCHFILE2})
SHA1-Patches:
- 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-0814.28
+ 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-2013.28
$(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)
- $(sha1sum ${PATCHFILE2} | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE2}) $(basename ${PATCHFILE2})" > $PATCHINDEX
+ $(sha1sum ${PATCHFILE2} | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE2}) $(basename ${PATCHFILE2})
+SHA256-Current: $(sha256sum aptarchive/Packages | cut -d' ' -f 1) $(stat -c%s aptarchive/Packages)
+SHA256-History:
+ 01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b 33053002 2010-08-18-2013.28
+ $(sha256sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
+ $(sha256sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new) $(basename ${PATCHFILE2})
+SHA256-Patches:
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 19722 2010-08-18-2013.28
+ $(sha256sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)
+ $(sha256sum ${PATCHFILE2} | cut -d' ' -f 1) $(stat -c%s ${PATCHFILE2}) $(basename ${PATCHFILE2})" > $PATCHINDEX
generatereleasefiles '+2hour'
signreleasefiles
cp -a aptarchive/Packages Packages-future
@@ -147,8 +163,15 @@ SHA1-History:
9f4148e06d7faa37062994ff10d0c842d7017513 33053002 2010-08-18-2013.28
$(sha1sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
SHA1-Patches:
- 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-0814.28
- $(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX
+ 7651fc0ac57cd83d41c63195a9342e2db5650257 19722 2010-08-18-2013.28
+ $(sha1sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)
+SHA256-Current: $(sha256sum ${PKGFILE}-new | cut -d' ' -f 1) $(stat -c%s ${PKGFILE}-new)
+SHA256-History:
+ 01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b 33053002 2010-08-18-2013.28
+ $(sha256sum $PKGFILE | cut -d' ' -f 1) $(stat -c%s $PKGFILE) $(basename $PATCHFILE)
+SHA256-Patches:
+ e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 19722 2010-08-18-2013.28
+ $(sha256sum $PATCHFILE | cut -d' ' -f 1) $(stat -c%s $PATCHFILE) $(basename $PATCHFILE)" > $PATCHINDEX
echo 'I am Mallory and I change files' >> $PATCHFILE
cat $PATCHFILE | gzip > ${PATCHFILE}.gz
generatereleasefiles '+1hour'
@@ -165,3 +188,16 @@ testrun -o Acquire::PDiffs::Merge=0 -o APT::Get::List-Cleanup=1
testrun -o Acquire::PDiffs::Merge=1 -o APT::Get::List-Cleanup=1
testrun -o Acquire::PDiffs::Merge=0 -o APT::Get::List-Cleanup=0
testrun -o Acquire::PDiffs::Merge=1 -o APT::Get::List-Cleanup=0
+
+sha256sum() {
+ echo '01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b -'
+}
+testrun -o Acquire::PDiffs::Merge=0 -o Acquire::ForceHash=SHA1
+testrun -o Acquire::PDiffs::Merge=1 -o Acquire::ForceHash=SHA1
+
+unset -f sha256sum
+sha1sum() {
+ echo 'adc83b19e793491b1c6ea0fd8b46cd9f32e592fc -'
+}
+testrun -o Acquire::PDiffs::Merge=0 -o Acquire::ForceHash=SHA256
+testrun -o Acquire::PDiffs::Merge=1 -o Acquire::ForceHash=SHA256