author     Michael Vogt <michael.vogt@ubuntu.com>   2011-12-19 14:22:05 +0100
committer  Michael Vogt <michael.vogt@ubuntu.com>   2011-12-19 14:22:05 +0100
commit     2f67d29b0c3de8618983055e811e70ee36662f4a (patch)
tree       7ab0646d51474b4f3c16c8a984f2f1edd43528d8
parent     27da8141d21cfbfc29675510737ee05bdfd4a2b1 (diff)
parent     29966fd1cc2bab68777e85b9070dba7821a58d36 (diff)

merged from lp:~donkult/apt/experimental
-rw-r--r--  apt-pkg/acquire-item.cc              |  4
-rw-r--r--  apt-pkg/contrib/fileutl.cc           | 73
-rw-r--r--  apt-pkg/contrib/hashes.cc            | 40
-rw-r--r--  apt-pkg/contrib/hashes.h             |  5
-rw-r--r--  apt-pkg/contrib/hashsum.cc           | 22
-rw-r--r--  apt-pkg/contrib/hashsum_template.h   |  3
-rw-r--r--  apt-pkg/deb/debindexfile.cc          |  3
-rw-r--r--  ftparchive/cachedb.cc                |  8
-rw-r--r--  ftparchive/writer.cc                 |  2
-rw-r--r--  methods/cdrom.cc                     |  2
-rw-r--r--  methods/copy.cc                      |  2
-rw-r--r--  methods/file.cc                      |  2
-rw-r--r--  methods/ftp.cc                       |  2
-rw-r--r--  methods/http.cc                      | 24
-rw-r--r--  methods/https.cc                     |  2
-rw-r--r--  methods/rred.cc                      | 49
-rw-r--r--  methods/rsh.cc                       |  2
17 files changed, 153 insertions(+), 92 deletions(-)
diff --git a/apt-pkg/acquire-item.cc b/apt-pkg/acquire-item.cc
index 453fce109..f231c42b4 100644
--- a/apt-pkg/acquire-item.cc
+++ b/apt-pkg/acquire-item.cc
@@ -438,7 +438,7 @@ bool pkgAcqDiffIndex::ParseDiffIndex(string IndexDiffFile) /*{{{*/
FileFd fd(CurrentPackagesFile, FileFd::ReadOnly);
SHA1Summation SHA1;
- SHA1.AddFD(fd.Fd(), fd.Size());
+ SHA1.AddFD(fd);
string const local_sha1 = SHA1.Result();
if(local_sha1 == ServerSha1)
@@ -669,7 +669,7 @@ bool pkgAcqIndexDiffs::QueueNextDiff() /*{{{*/
FileFd fd(FinalFile, FileFd::ReadOnly);
SHA1Summation SHA1;
- SHA1.AddFD(fd.Fd(), fd.Size());
+ SHA1.AddFD(fd);
string local_sha1 = string(SHA1.Result());
if(Debug)
std::clog << "QueueNextDiff: "
diff --git a/apt-pkg/contrib/fileutl.cc b/apt-pkg/contrib/fileutl.cc
index a98c2cb85..b350973af 100644
--- a/apt-pkg/contrib/fileutl.cc
+++ b/apt-pkg/contrib/fileutl.cc
@@ -75,7 +75,9 @@ class FileFdPrivate {
bool pipe;
APT::Configuration::Compressor compressor;
unsigned int openmode;
- FileFdPrivate() : gz(NULL), compressed_fd(-1), compressor_pid(-1), pipe(false) {};
+ unsigned long long seekpos;
+ FileFdPrivate() : gz(NULL), compressed_fd(-1), compressor_pid(-1), pipe(false),
+ openmode(0), seekpos(0) {};
};
// RunScripts - Run a set of scripts from a configuration subtree /*{{{*/
@@ -1065,6 +1067,7 @@ bool FileFd::Read(void *To,unsigned long long Size,unsigned long long *Actual)
To = (char *)To + Res;
Size -= Res;
+ d->seekpos += Res;
if (Actual != 0)
*Actual += Res;
}
@@ -1097,15 +1100,19 @@ char* FileFd::ReadLine(char *To, unsigned long long const Size)
#endif
unsigned long long read = 0;
- if (Read(To, Size, &read) == false)
+ while ((Size - 1) != read)
+ {
+ unsigned long long done = 0;
+ if (Read(To + read, 1, &done) == false)
+ return NULL;
+ if (done == 0)
+ break;
+ if (To[read++] == '\n')
+ break;
+ }
+ if (read == 0)
return NULL;
- char* c = To;
- for (; *c != '\n' && *c != '\0' && read != 0; --read, ++c)
- ; // find the end of the line
- if (*c != '\0')
- *c = '\0';
- if (read != 0)
- Seek(Tell() - read);
+ To[read] = '\0';
return To;
}
/*}}}*/
@@ -1134,6 +1141,7 @@ bool FileFd::Write(const void *From,unsigned long long Size)
From = (char *)From + Res;
Size -= Res;
+ d->seekpos += Res;
}
while (Res > 0 && Size > 0);
@@ -1151,6 +1159,13 @@ bool FileFd::Seek(unsigned long long To)
{
if (d->pipe == true)
{
+ // Our poor man's seeking in pipes is costly, so try to avoid it
+ unsigned long long seekpos = Tell();
+ if (seekpos == To)
+ return true;
+ else if (seekpos < To)
+ return Skip(To - seekpos);
+
if ((d->openmode & ReadOnly) != ReadOnly)
return _error->Error("Reopen is only implemented for read-only files!");
close(iFd);
@@ -1173,6 +1188,8 @@ bool FileFd::Seek(unsigned long long To)
if (To != 0)
return Skip(To);
+
+ d->seekpos = To;
return true;
}
int res;
@@ -1187,7 +1204,8 @@ bool FileFd::Seek(unsigned long long To)
Flags |= Fail;
return _error->Error("Unable to seek to %llu", To);
}
-
+
+ d->seekpos = To;
return true;
}
/*}}}*/
@@ -1196,6 +1214,20 @@ bool FileFd::Seek(unsigned long long To)
/* */
bool FileFd::Skip(unsigned long long Over)
{
+ if (d->pipe == true)
+ {
+ d->seekpos += Over;
+ char buffer[1024];
+ while (Over != 0)
+ {
+ unsigned long long toread = std::min((unsigned long long) sizeof(buffer), Over);
+ if (Read(buffer, toread) == false)
+ return _error->Error("Unable to seek ahead %llu",Over);
+ Over -= toread;
+ }
+ return true;
+ }
+
int res;
#if APT_USE_ZLIB
if (d->gz != NULL)
@@ -1208,7 +1240,8 @@ bool FileFd::Skip(unsigned long long Over)
Flags |= Fail;
return _error->Error("Unable to seek ahead %llu",Over);
}
-
+ d->seekpos = res;
+
return true;
}
/*}}}*/
@@ -1236,6 +1269,13 @@ bool FileFd::Truncate(unsigned long long To)
/* */
unsigned long long FileFd::Tell()
{
+ // In theory we could always just return seekpos here instead of
+ // seeking around, but not every user of FileFd goes through Seek() and
+ // friends, so d->seekpos isn't always accurate: use it as a hint when
+ // we have nothing better, but not as an authority…
+ if (d->pipe == true)
+ return d->seekpos;
+
off_t Res;
#if APT_USE_ZLIB
if (d->gz != NULL)
@@ -1245,6 +1285,7 @@ unsigned long long FileFd::Tell()
Res = lseek(iFd,0,SEEK_CUR);
if (Res == (off_t)-1)
_error->Errno("lseek","Failed to determine the current file position");
+ d->seekpos = Res;
return Res;
}
/*}}}*/
@@ -1281,15 +1322,14 @@ unsigned long long FileFd::Size()
// so we 'read' the content and 'seek' back - see there
if (d->pipe == true)
{
- // FIXME: If we have read first and then FileSize() the report is wrong
- size = 0;
+ unsigned long long const oldSeek = Tell();
char ignore[1000];
unsigned long long read = 0;
do {
Read(ignore, sizeof(ignore), &read);
- size += read;
} while(read != 0);
- Seek(0);
+ size = Tell();
+ Seek(oldSeek);
}
#if APT_USE_ZLIB
// only check gzsize if we are actually a gzip file, just checking for
@@ -1301,7 +1341,6 @@ unsigned long long FileFd::Size()
* this ourselves; the original (uncompressed) file size is the last 32
* bits of the file */
// FIXME: Size for gz-files is limited by 32bit… no largefile support
- off_t orig_pos = lseek(iFd, 0, SEEK_CUR);
if (lseek(iFd, -4, SEEK_END) < 0)
return _error->Errno("lseek","Unable to seek to end of gzipped file");
size = 0L;
@@ -1315,7 +1354,7 @@ unsigned long long FileFd::Size()
size = tmp_size;
#endif
- if (lseek(iFd, orig_pos, SEEK_SET) < 0)
+ if (lseek(iFd, d->seekpos, SEEK_SET) < 0)
return _error->Errno("lseek","Unable to seek in gzipped file");
return size;
}
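
The fileutl.cc changes above give FileFd its own position counter (d->seekpos) so that pipe-backed files, which cannot lseek(), can still answer Tell() and satisfy forward Seek()/Skip() requests by reading and discarding the bytes in between. A minimal standalone sketch of that technique follows (the PipeReader class and its names are illustrative, not apt's API):

#include <unistd.h>
#include <algorithm>

class PipeReader
{
   int fd;
   unsigned long long seekpos;          // bytes consumed so far
public:
   explicit PipeReader(int fd) : fd(fd), seekpos(0) {}

   ssize_t Read(void *to, size_t size)
   {
      ssize_t res = read(fd, to, size);
      if (res > 0)
         seekpos += res;                // keep the bookkeeping current
      return res;
   }
   // Tell() costs nothing: we already know how far we have read.
   unsigned long long Tell() const { return seekpos; }

   // Forward-only "seek": read and throw away the bytes in between.
   bool Skip(unsigned long long over)
   {
      char buffer[1024];
      while (over != 0)
      {
         unsigned long long const toread = std::min<unsigned long long>(sizeof(buffer), over);
         ssize_t const res = Read(buffer, toread);
         if (res <= 0)
            return false;
         over -= res;
      }
      return true;
   }
   bool Seek(unsigned long long to)
   {
      if (to < seekpos)
         return false;                  // a pipe cannot be rewound
      return to == seekpos ? true : Skip(to - seekpos);
   }
};

The real FileFd additionally falls back to reopening the read-only compressor pipe when a backwards seek is requested, which this sketch leaves out.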
diff --git a/apt-pkg/contrib/hashes.cc b/apt-pkg/contrib/hashes.cc
index 05001f042..e1a431823 100644
--- a/apt-pkg/contrib/hashes.cc
+++ b/apt-pkg/contrib/hashes.cc
@@ -61,25 +61,25 @@ bool HashString::VerifyFile(std::string filename) const /*{{{*/
if(Type == "MD5Sum")
{
MD5Summation MD5;
- MD5.AddFD(Fd.Fd(), Fd.Size());
+ MD5.AddFD(Fd);
fileHash = (std::string)MD5.Result();
}
else if (Type == "SHA1")
{
SHA1Summation SHA1;
- SHA1.AddFD(Fd.Fd(), Fd.Size());
+ SHA1.AddFD(Fd);
fileHash = (std::string)SHA1.Result();
}
else if (Type == "SHA256")
{
SHA256Summation SHA256;
- SHA256.AddFD(Fd.Fd(), Fd.Size());
+ SHA256.AddFD(Fd);
fileHash = (std::string)SHA256.Result();
}
else if (Type == "SHA512")
{
SHA512Summation SHA512;
- SHA512.AddFD(Fd.Fd(), Fd.Size());
+ SHA512.AddFD(Fd);
fileHash = (std::string)SHA512.Result();
}
Fd.Close();
@@ -135,5 +135,35 @@ bool Hashes::AddFD(int const Fd,unsigned long long Size, bool const addMD5,
}
return true;
}
+bool Hashes::AddFD(FileFd &Fd,unsigned long long Size, bool const addMD5,
+ bool const addSHA1, bool const addSHA256, bool const addSHA512)
+{
+ unsigned char Buf[64*64];
+ bool const ToEOF = (Size == 0);
+ while (Size != 0 || ToEOF)
+ {
+ unsigned long long n = sizeof(Buf);
+ if (!ToEOF) n = std::min(Size, n);
+ unsigned long long a = 0;
+ if (Fd.Read(Buf, n, &a) == false) // error
+ return false;
+ if (ToEOF == false)
+ {
+ if (a != n) // short read
+ return false;
+ }
+ else if (a == 0) // EOF
+ break;
+ Size -= a;
+ if (addMD5 == true)
+ MD5.Add(Buf, a);
+ if (addSHA1 == true)
+ SHA1.Add(Buf, a);
+ if (addSHA256 == true)
+ SHA256.Add(Buf, a);
+ if (addSHA512 == true)
+ SHA512.Add(Buf, a);
+ }
+ return true;
+}
/*}}}*/
-
diff --git a/apt-pkg/contrib/hashes.h b/apt-pkg/contrib/hashes.h
index b206eccb8..0c0b6c6a7 100644
--- a/apt-pkg/contrib/hashes.h
+++ b/apt-pkg/contrib/hashes.h
@@ -17,6 +17,7 @@
#include <apt-pkg/md5.h>
#include <apt-pkg/sha1.h>
#include <apt-pkg/sha2.h>
+#include <apt-pkg/fileutl.h>
#include <algorithm>
#include <vector>
@@ -74,6 +75,10 @@ class Hashes
{ return AddFD(Fd, Size, true, true, true, true); };
bool AddFD(int const Fd, unsigned long long Size, bool const addMD5,
bool const addSHA1, bool const addSHA256, bool const addSHA512);
+ inline bool AddFD(FileFd &Fd,unsigned long long Size = 0)
+ { return AddFD(Fd, Size, true, true, true, true); };
+ bool AddFD(FileFd &Fd, unsigned long long Size, bool const addMD5,
+ bool const addSHA1, bool const addSHA256, bool const addSHA512);
inline bool Add(const unsigned char *Beg,const unsigned char *End)
{return Add(Beg,End-Beg);};
};
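
The new AddFD(FileFd&) overloads declared here let callers hash directly from a FileFd — including pipe- or gzip-backed ones — instead of passing the raw descriptor together with a size. A sketch of how the call sites changed in this commit use it (assumes the apt-pkg headers; the helper name is made up):

#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>
#include <string>

static bool HashWholeFile(std::string const &Path, Hashes &Hash)
{
   FileFd Fd(Path, FileFd::ReadOnly);
   if (Fd.IsOpen() == false)
      return false;
   // Size defaults to 0, meaning "read until EOF", so the caller no
   // longer needs Fd.Size() — which is expensive for pipes.
   return Hash.AddFD(Fd);
}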
diff --git a/apt-pkg/contrib/hashsum.cc b/apt-pkg/contrib/hashsum.cc
index ff3b112bb..289e43aa4 100644
--- a/apt-pkg/contrib/hashsum.cc
+++ b/apt-pkg/contrib/hashsum.cc
@@ -25,4 +25,26 @@ bool SummationImplementation::AddFD(int const Fd, unsigned long long Size) {
}
return true;
}
+bool SummationImplementation::AddFD(FileFd &Fd, unsigned long long Size) {
+ unsigned char Buf[64 * 64];
+ bool ToEOF = (Size == 0);
+ while (Size != 0 || ToEOF)
+ {
+ unsigned long long n = sizeof(Buf);
+ if (!ToEOF) n = std::min(Size, n);
+ unsigned long long a = 0;
+ if (Fd.Read(Buf, n, &a) == false) // error
+ return false;
+ if (ToEOF == false)
+ {
+ if (a != n) // short read
+ return false;
+ }
+ else if (a == 0) // EOF
+ break;
+ Size -= a;
+ Add(Buf, a);
+ }
+ return true;
+}
/*}}}*/
diff --git a/apt-pkg/contrib/hashsum_template.h b/apt-pkg/contrib/hashsum_template.h
index 6301ac9d0..51e3b0862 100644
--- a/apt-pkg/contrib/hashsum_template.h
+++ b/apt-pkg/contrib/hashsum_template.h
@@ -10,6 +10,8 @@
#ifndef APTPKG_HASHSUM_TEMPLATE_H
#define APTPKG_HASHSUM_TEMPLATE_H
+#include <apt-pkg/fileutl.h>
+
#include <string>
#include <cstring>
#include <algorithm>
@@ -108,6 +110,7 @@ class SummationImplementation
{ return Add((const unsigned char *)Beg, End - Beg); };
bool AddFD(int Fd, unsigned long long Size = 0);
+ bool AddFD(FileFd &Fd, unsigned long long Size = 0);
};
#endif
diff --git a/apt-pkg/deb/debindexfile.cc b/apt-pkg/deb/debindexfile.cc
index 84791a70a..5dc2a2ac2 100644
--- a/apt-pkg/deb/debindexfile.cc
+++ b/apt-pkg/deb/debindexfile.cc
@@ -600,9 +600,6 @@ bool debStatusIndex::Merge(pkgCacheGenerator &Gen,OpProgress *Prog) const
// Store the IMS information
pkgCache::PkgFileIterator CFile = Gen.GetCurFile();
- struct stat St;
- if (fstat(Pkg.Fd(),&St) != 0)
- return _error->Errno("fstat","Failed to stat");
CFile->Size = Pkg.FileSize();
CFile->mtime = Pkg.ModificationTime();
CFile->Archive = Gen.WriteUniqString("now");
diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc
index f0bfa2a6d..a8b637a80 100644
--- a/ftparchive/cachedb.cc
+++ b/ftparchive/cachedb.cc
@@ -351,7 +351,7 @@ bool CacheDB::GetMD5(bool const &GenOnly)
return false;
}
MD5Summation MD5;
- if (Fd->Seek(0) == false || MD5.AddFD(Fd->Fd(),CurStat.FileSize) == false)
+ if (Fd->Seek(0) == false || MD5.AddFD(*Fd, CurStat.FileSize) == false)
return false;
MD5Res = MD5.Result();
@@ -382,7 +382,7 @@ bool CacheDB::GetSHA1(bool const &GenOnly)
return false;
}
SHA1Summation SHA1;
- if (Fd->Seek(0) == false || SHA1.AddFD(Fd->Fd(),CurStat.FileSize) == false)
+ if (Fd->Seek(0) == false || SHA1.AddFD(*Fd, CurStat.FileSize) == false)
return false;
SHA1Res = SHA1.Result();
@@ -413,7 +413,7 @@ bool CacheDB::GetSHA256(bool const &GenOnly)
return false;
}
SHA256Summation SHA256;
- if (Fd->Seek(0) == false || SHA256.AddFD(Fd->Fd(),CurStat.FileSize) == false)
+ if (Fd->Seek(0) == false || SHA256.AddFD(*Fd, CurStat.FileSize) == false)
return false;
SHA256Res = SHA256.Result();
@@ -444,7 +444,7 @@ bool CacheDB::GetSHA512(bool const &GenOnly)
return false;
}
SHA512Summation SHA512;
- if (Fd->Seek(0) == false || SHA512.AddFD(Fd->Fd(),CurStat.FileSize) == false)
+ if (Fd->Seek(0) == false || SHA512.AddFD(*Fd, CurStat.FileSize) == false)
return false;
SHA512Res = SHA512.Result();
diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index 02777713c..159772991 100644
--- a/ftparchive/writer.cc
+++ b/ftparchive/writer.cc
@@ -1035,7 +1035,7 @@ bool ReleaseWriter::DoPackage(string FileName)
CheckSums[NewFileName].size = fd.Size();
Hashes hs;
- hs.AddFD(fd.Fd(), 0, DoMD5, DoSHA1, DoSHA256, DoSHA512);
+ hs.AddFD(fd, 0, DoMD5, DoSHA1, DoSHA256, DoSHA512);
if (DoMD5 == true)
CheckSums[NewFileName].MD5 = hs.MD5.Result();
if (DoSHA1 == true)
diff --git a/methods/cdrom.cc b/methods/cdrom.cc
index e7114b168..22d4b9164 100644
--- a/methods/cdrom.cc
+++ b/methods/cdrom.cc
@@ -268,7 +268,7 @@ bool CDROMMethod::Fetch(FetchItem *Itm)
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
- Hash.AddFD(Fd.Fd(), Fd.Size());
+ Hash.AddFD(Fd);
Res.TakeHashes(Hash);
URIDone(Res);
diff --git a/methods/copy.cc b/methods/copy.cc
index f8d58e479..e81d0022b 100644
--- a/methods/copy.cc
+++ b/methods/copy.cc
@@ -85,7 +85,7 @@ bool CopyMethod::Fetch(FetchItem *Itm)
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
- Hash.AddFD(Fd.Fd(), Fd.Size());
+ Hash.AddFD(Fd);
Res.TakeHashes(Hash);
URIDone(Res);
diff --git a/methods/file.cc b/methods/file.cc
index 5025c996d..7ed4e6f60 100644
--- a/methods/file.cc
+++ b/methods/file.cc
@@ -83,7 +83,7 @@ bool FileMethod::Fetch(FetchItem *Itm)
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
- Hash.AddFD(Fd.Fd(), Fd.Size());
+ Hash.AddFD(Fd);
Res.TakeHashes(Hash);
URIDone(Res);
return true;
diff --git a/methods/ftp.cc b/methods/ftp.cc
index 2ca0ac6f7..ad8a7b828 100644
--- a/methods/ftp.cc
+++ b/methods/ftp.cc
@@ -868,7 +868,7 @@ bool FTPConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
if (Resume != 0)
{
- if (Hash.AddFD(To.Fd(),Resume) == false)
+ if (Hash.AddFD(To,Resume) == false)
{
_error->Errno("read",_("Problem hashing file"));
return false;
diff --git a/methods/http.cc b/methods/http.cc
index 0d81c73ed..b8ed43cd2 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -1007,31 +1007,21 @@ HttpMethod::DealWithHeaders(FetchResult &Res,ServerState *Srv)
FailFile.c_str(); // Make sure we dont do a malloc in the signal handler
FailFd = File->Fd();
FailTime = Srv->Date;
-
- // Set the expected size
- if (Srv->StartPos >= 0)
- {
- Res.ResumePoint = Srv->StartPos;
- if (ftruncate(File->Fd(),Srv->StartPos) < 0)
- _error->Errno("ftruncate", _("Failed to truncate file"));
- }
-
- // Set the start point
- lseek(File->Fd(),0,SEEK_END);
delete Srv->In.Hash;
Srv->In.Hash = new Hashes;
-
- // Fill the Hash if the file is non-empty (resume)
- if (Srv->StartPos > 0)
+
+ // Set the expected size and read file for the hashes
+ if (Srv->StartPos >= 0)
{
- lseek(File->Fd(),0,SEEK_SET);
- if (Srv->In.Hash->AddFD(File->Fd(),Srv->StartPos) == false)
+ Res.ResumePoint = Srv->StartPos;
+ File->Truncate(Srv->StartPos);
+
+ if (Srv->In.Hash->AddFD(*File,Srv->StartPos) == false)
{
_error->Errno("read",_("Problem hashing file"));
return ERROR_NOT_FROM_SERVER;
}
- lseek(File->Fd(),0,SEEK_END);
}
SetNonBlock(File->Fd(),true);
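
methods/http.cc now handles a resumed download by truncating the partial file at the server-reported start position and running exactly those bytes through the hashes via the new FileFd overload, replacing the earlier ftruncate()/lseek() juggling on the raw descriptor. Roughly, and only as an illustration (the helper is hypothetical; it assumes File is open for reading and writing with its offset at the start):

#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>

static bool PrimeHashesForResume(FileFd &File, Hashes &InHash,
                                 unsigned long long const StartPos)
{
   // Drop anything beyond the resume point...
   if (File.Truncate(StartPos) == false)
      return false;
   // ...and feed the kept prefix to the hashes so the final checksum
   // covers the complete file once the remainder has been fetched.
   return InHash.AddFD(File, StartPos);
}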
diff --git a/methods/https.cc b/methods/https.cc
index 335699907..317c8a587 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -314,7 +314,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// take hashes
Hashes Hash;
FileFd Fd(Res.Filename, FileFd::ReadOnly);
- Hash.AddFD(Fd.Fd(), Fd.Size());
+ Hash.AddFD(Fd);
Res.TakeHashes(Hash);
// keep apt updated
diff --git a/methods/rred.cc b/methods/rred.cc
index 2a70a9f91..bf9294d96 100644
--- a/methods/rred.cc
+++ b/methods/rred.cc
@@ -37,13 +37,10 @@ class RredMethod : public pkgAcqMethod {
// return values
enum State {ED_OK, ED_ORDERING, ED_PARSER, ED_FAILURE, MMAP_FAILED};
- State applyFile(FileFd &ed_cmds, FILE *in_file, FILE *out_file,
+ State applyFile(FileFd &ed_cmds, FileFd &in_file, FileFd &out_file,
unsigned long &line, char *buffer, Hashes *hash) const;
- void ignoreLineInFile(FILE *fin, char *buffer) const;
void ignoreLineInFile(FileFd &fin, char *buffer) const;
- void copyLinesFromFileToFile(FILE *fin, FILE *fout, unsigned int lines,
- Hashes *hash, char *buffer) const;
- void copyLinesFromFileToFile(FileFd &fin, FILE *fout, unsigned int lines,
+ void copyLinesFromFileToFile(FileFd &fin, FileFd &fout, unsigned int lines,
Hashes *hash, char *buffer) const;
State patchFile(FileFd &Patch, FileFd &From, FileFd &out_file, Hashes *hash) const;
@@ -72,7 +69,7 @@ public:
* \param hash the created file for correctness
* \return the success State of the ed command executor
*/
-RredMethod::State RredMethod::applyFile(FileFd &ed_cmds, FILE *in_file, FILE *out_file,
+RredMethod::State RredMethod::applyFile(FileFd &ed_cmds, FileFd &in_file, FileFd &out_file,
unsigned long &line, char *buffer, Hashes *hash) const {
// get the current command and parse it
if (ed_cmds.ReadLine(buffer, BUF_SIZE) == NULL) {
@@ -178,39 +175,19 @@ RredMethod::State RredMethod::applyFile(FileFd &ed_cmds, FILE *in_file, FILE *ou
return ED_OK;
}
/*}}}*/
-void RredMethod::copyLinesFromFileToFile(FILE *fin, FILE *fout, unsigned int lines,/*{{{*/
- Hashes *hash, char *buffer) const {
- while (0 < lines--) {
- do {
- fgets(buffer, BUF_SIZE, fin);
- size_t const written = fwrite(buffer, 1, strlen(buffer), fout);
- hash->Add((unsigned char*)buffer, written);
- } while (strlen(buffer) == (BUF_SIZE - 1) &&
- buffer[BUF_SIZE - 2] != '\n');
- }
-}
- /*}}}*/
-void RredMethod::copyLinesFromFileToFile(FileFd &fin, FILE *fout, unsigned int lines,/*{{{*/
+void RredMethod::copyLinesFromFileToFile(FileFd &fin, FileFd &fout, unsigned int lines,/*{{{*/
Hashes *hash, char *buffer) const {
while (0 < lines--) {
do {
fin.ReadLine(buffer, BUF_SIZE);
- size_t const written = fwrite(buffer, 1, strlen(buffer), fout);
- hash->Add((unsigned char*)buffer, written);
+ unsigned long long const towrite = strlen(buffer);
+ fout.Write(buffer, towrite);
+ hash->Add((unsigned char*)buffer, towrite);
} while (strlen(buffer) == (BUF_SIZE - 1) &&
buffer[BUF_SIZE - 2] != '\n');
}
}
/*}}}*/
-void RredMethod::ignoreLineInFile(FILE *fin, char *buffer) const { /*{{{*/
- fgets(buffer, BUF_SIZE, fin);
- while (strlen(buffer) == (BUF_SIZE - 1) &&
- buffer[BUF_SIZE - 2] != '\n') {
- fgets(buffer, BUF_SIZE, fin);
- buffer[0] = ' ';
- }
-}
- /*}}}*/
void RredMethod::ignoreLineInFile(FileFd &fin, char *buffer) const { /*{{{*/
fin.ReadLine(buffer, BUF_SIZE);
while (strlen(buffer) == (BUF_SIZE - 1) &&
@@ -223,20 +200,18 @@ void RredMethod::ignoreLineInFile(FileFd &fin, char *buffer) const { /*{{{*/
RredMethod::State RredMethod::patchFile(FileFd &Patch, FileFd &From, /*{{{*/
FileFd &out_file, Hashes *hash) const {
char buffer[BUF_SIZE];
- FILE* fFrom = fdopen(From.Fd(), "r");
- FILE* fTo = fdopen(out_file.Fd(), "w");
/* we do a tail recursion to read the commands in the right order */
unsigned long line = -1; // assign highest possible value
- State const result = applyFile(Patch, fFrom, fTo, line, buffer, hash);
+ State const result = applyFile(Patch, From, out_file, line, buffer, hash);
/* read the rest from infile */
if (result == ED_OK) {
- while (fgets(buffer, BUF_SIZE, fFrom) != NULL) {
- size_t const written = fwrite(buffer, 1, strlen(buffer), fTo);
- hash->Add((unsigned char*)buffer, written);
+ while (From.ReadLine(buffer, BUF_SIZE) != NULL) {
+ unsigned long long const towrite = strlen(buffer);
+ out_file.Write(buffer, towrite);
+ hash->Add((unsigned char*)buffer, towrite);
}
- fflush(fTo);
}
return result;
}
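
With the stdio FILE* code paths removed, rred streams both input and output through FileFd, presumably so the same copy loop works for plain files and for the pipe-backed descriptors FileFd can now wrap. The tail-copy step above amounts to the following (a hypothetical helper extracted for illustration; ReadLine() null-terminates the buffer, so strlen() gives the number of bytes to write):

#include <apt-pkg/fileutl.h>
#include <apt-pkg/hashes.h>
#include <cstring>

static void CopyRestAndHash(FileFd &From, FileFd &To, Hashes &Hash,
                            char *Buffer, unsigned long long const BufSize)
{
   while (From.ReadLine(Buffer, BufSize) != NULL)
   {
      unsigned long long const ToWrite = strlen(Buffer);
      To.Write(Buffer, ToWrite);
      Hash.Add((unsigned char *)Buffer, ToWrite);
   }
}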
diff --git a/methods/rsh.cc b/methods/rsh.cc
index da9777fc4..d249ae961 100644
--- a/methods/rsh.cc
+++ b/methods/rsh.cc
@@ -305,7 +305,7 @@ bool RSHConn::Get(const char *Path,FileFd &To,unsigned long long Resume,
return false;
if (Resume != 0) {
- if (Hash.AddFD(To.Fd(),Resume) == false) {
+ if (Hash.AddFD(To,Resume) == false) {
_error->Errno("read",_("Problem hashing file"));
return false;
}