author    Michael Vogt <michael.vogt@ubuntu.com>    2006-07-24 22:51:14 +0200
committer Michael Vogt <michael.vogt@ubuntu.com>    2006-07-24 22:51:14 +0200
commit    2ca0a410fadaa3c01d4573a87c074cb8e64a02c4 (patch)
tree      2d3ca8581e85da7239bfc5b8f6d0783bb14f72db /ftparchive
parent    c64d1644ae0fd1af92c80a91a6c17b57f0b8f313 (diff)
parent    a46272c01ef6fda081250b63ff0b6b76ec671cda (diff)
* merged ajs apt-ftparchive branch
Diffstat (limited to 'ftparchive')
-rw-r--r--   ftparchive/cachedb.cc   256
-rw-r--r--   ftparchive/cachedb.h     40
-rw-r--r--   ftparchive/writer.cc    104
-rw-r--r--   ftparchive/writer.h       8
4 files changed, 314 insertions, 94 deletions
diff --git a/ftparchive/cachedb.cc b/ftparchive/cachedb.cc
index 9e93dff05..0e6078642 100644
--- a/ftparchive/cachedb.cc
+++ b/ftparchive/cachedb.cc
@@ -19,6 +19,8 @@
#include <apti18n.h>
#include <apt-pkg/error.h>
#include <apt-pkg/md5.h>
+#include <apt-pkg/sha1.h>
+#include <apt-pkg/sha256.h>
#include <apt-pkg/strutl.h>
#include <apt-pkg/configuration.h>
@@ -54,7 +56,7 @@ bool CacheDB::ReadyDB(string DB)
return true;
db_create(&Dbp, NULL, 0);
- if ((err = Dbp->open(Dbp, NULL, DB.c_str(), NULL, DB_HASH,
+ if ((err = Dbp->open(Dbp, NULL, DB.c_str(), NULL, DB_BTREE,
(ReadOnly?DB_RDONLY:DB_CREATE),
0644)) != 0)
{
@@ -67,6 +69,12 @@ bool CacheDB::ReadyDB(string DB)
(ReadOnly?DB_RDONLY:DB_CREATE), 0644);
}
+ // the database format has changed from DB_HASH to DB_BTREE in
+ // apt 0.6.44
+ if (err == EINVAL)
+ {
+ _error->Error(_("DB format is invalid. If you upgraded from an older version of apt, please remove and re-create the database."));
+ }
if (err)
{
Dbp = 0;
@@ -79,48 +87,123 @@ bool CacheDB::ReadyDB(string DB)
return true;
}
/*}}}*/
-// CacheDB::SetFile - Select a file to be working with /*{{{*/
+// CacheDB::OpenFile - Open the file /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool CacheDB::OpenFile()
+{
+ Fd = new FileFd(FileName,FileFd::ReadOnly);
+ if (_error->PendingError() == true)
+ {
+ delete Fd;
+ Fd = NULL;
+ return false;
+ }
+ return true;
+}
+ /*}}}*/
+// CacheDB::GetFileStat - Get stats from the file /*{{{*/
+// ---------------------------------------------------------------------
+/* This gets the size from the database if it's there. If we need
+ * to look at the file, also get the mtime from the file. */
+bool CacheDB::GetFileStat()
+{
+ if ((CurStat.Flags & FlSize) == FlSize)
+ {
+ /* Already worked out the file size */
+ }
+ else
+ {
+ /* Get it from the file. */
+ if (Fd == NULL && OpenFile() == false)
+ {
+ return false;
+ }
+ // Stat the file
+ struct stat St;
+ if (fstat(Fd->Fd(),&St) != 0)
+ {
+ return _error->Errno("fstat",
+ _("Failed to stat %s"),FileName.c_str());
+ }
+ CurStat.FileSize = St.st_size;
+ CurStat.mtime = htonl(St.st_mtime);
+ CurStat.Flags |= FlSize;
+ }
+ return true;
+}
+ /*}}}*/
+// CacheDB::GetCurStat - Set the CurStat variable. /*{{{*/
// ---------------------------------------------------------------------
-/* All future actions will be performed against this file */
-bool CacheDB::SetFile(string FileName,struct stat St,FileFd *Fd)
+/* Sets the CurStat variable. Either to 0 if no database is used
+ * or to the value in the database if one is used */
+bool CacheDB::GetCurStat()
{
- delete DebFile;
- DebFile = 0;
- this->FileName = FileName;
- this->Fd = Fd;
- this->FileStat = St;
- FileStat = St;
memset(&CurStat,0,sizeof(CurStat));
- Stats.Bytes += St.st_size;
- Stats.Packages++;
-
- if (DBLoaded == false)
- return true;
+ if (DBLoaded)
+ {
+ /* First see if there is anything about it
+ in the database */
+ /* Get the flags (and mtime) */
InitQuery("st");
-
// Ensure alignment of the returned structure
Data.data = &CurStat;
Data.ulen = sizeof(CurStat);
Data.flags = DB_DBT_USERMEM;
- // Lookup the stat info and confirm the file is unchanged
- if (Get() == true)
- {
- if (CurStat.mtime != htonl(St.st_mtime))
+ if (Get() == false)
{
- CurStat.mtime = htonl(St.st_mtime);
CurStat.Flags = 0;
- _error->Warning(_("File date has changed %s"),FileName.c_str());
}
+ CurStat.Flags = ntohl(CurStat.Flags);
+ CurStat.FileSize = ntohl(CurStat.FileSize);
}
- else
+ return true;
+}
+ /*}}}*/
+// CacheDB::GetFileInfo - Get all the info about the file /*{{{*/
+// ---------------------------------------------------------------------
+bool CacheDB::GetFileInfo(string FileName, bool DoControl, bool DoContents,
+ bool GenContentsOnly,
+ bool DoMD5, bool DoSHA1, bool DoSHA256)
+{
+ this->FileName = FileName;
+
+ if (GetCurStat() == false)
{
- CurStat.mtime = htonl(St.st_mtime);
- CurStat.Flags = 0;
+ return false;
}
- CurStat.Flags = ntohl(CurStat.Flags);
OldStat = CurStat;
+
+ if (GetFileStat() == false)
+ {
+ delete Fd;
+ Fd = NULL;
+ return false;
+ }
+
+ Stats.Bytes += CurStat.FileSize;
+ Stats.Packages++;
+
+ if (DoControl && LoadControl() == false
+ || DoContents && LoadContents(GenContentsOnly) == false
+ || DoMD5 && GetMD5(false) == false
+ || DoSHA1 && GetSHA1(false) == false
+ || DoSHA256 && GetSHA256(false) == false)
+ {
+ delete Fd;
+ Fd = NULL;
+ delete DebFile;
+ DebFile = NULL;
+ return false;
+ }
+
+ delete Fd;
+ Fd = NULL;
+ delete DebFile;
+ DebFile = NULL;
+
return true;
}
/*}}}*/
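The practical effect of this hunk: callers no longer open and stat the archive themselves; a single GetFileInfo() call drives control, contents and checksum extraction, and the results are read back from the CacheDB members afterwards. A minimal caller sketch under that assumption (the archive and database paths are made up; the real call site is in the PackagesWriter::DoPackage hunk further down):

   CacheDB Db("packages.db");                     // hypothetical cache database path
   if (Db.GetFileInfo("pool/main/a/apt_0.6.44_i386.deb",
                      true,                       // DoControl  - parse the control member
                      true,                       // DoContents - collect the file list
                      true,                       // GenContentsOnly
                      true, true, true) == false) // DoMD5, DoSHA1, DoSHA256
      return false;
   off_t Size = Db.GetFileSize();                 // size now comes out of the cache record
   string MD5 = Db.MD5Res;                        // hashes are left in MD5Res/SHA1Res/SHA256Res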
@@ -139,6 +222,10 @@ bool CacheDB::LoadControl()
CurStat.Flags &= ~FlControl;
}
+ if (Fd == NULL && OpenFile() == false)
+ {
+ return false;
+ }
// Create a deb instance to read the archive
if (DebFile == 0)
{
@@ -183,6 +270,10 @@ bool CacheDB::LoadContents(bool GenOnly)
CurStat.Flags &= ~FlContents;
}
+ if (Fd == NULL && OpenFile() == false)
+ {
+ return false;
+ }
// Create a deb instance to read the archive
if (DebFile == 0)
{
@@ -201,10 +292,37 @@ bool CacheDB::LoadContents(bool GenOnly)
return true;
}
/*}}}*/
+
+static string bytes2hex(uint8_t *bytes, size_t length) {
+ char space[65];
+ if (length * 2 > sizeof(space) - 1) length = (sizeof(space) - 1) / 2;
+ for (size_t i = 0; i < length; i++)
+ snprintf(&space[i*2], 3, "%02x", bytes[i]);
+ return string(space);
+}
+
+static inline unsigned char xdig2num(char dig) {
+ if (isdigit(dig)) return dig - '0';
+ if ('a' <= dig && dig <= 'f') return dig - 'a' + 10;
+ if ('A' <= dig && dig <= 'F') return dig - 'A' + 10;
+ return 0;
+}
+
+static void hex2bytes(uint8_t *bytes, const char *hex, int length) {
+ while (length-- > 0) {
+ *bytes = 0;
+ if (isxdigit(hex[0]) && isxdigit(hex[1])) {
+ *bytes = xdig2num(hex[0]) * 16 + xdig2num(hex[1]);
+ hex += 2;
+ }
+ bytes++;
+ }
+}
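These two helpers convert between the fixed-width binary digest fields in the new StatStore and the hex strings used everywhere else in apt-ftparchive. A rough round-trip sketch (the value is the well-known MD5 of empty input):

   uint8_t md5[16];
   hex2bytes(md5, "d41d8cd98f00b204e9800998ecf8427e", sizeof(md5)); // hex -> 16 raw bytes
   string hex = bytes2hex(md5, sizeof(md5));  // back to "d41d8cd98f00b204e9800998ecf8427e"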
+
// CacheDB::GetMD5 - Get the MD5 hash /*{{{*/
// ---------------------------------------------------------------------
/* */
-bool CacheDB::GetMD5(string &MD5Res,bool GenOnly)
+bool CacheDB::GetMD5(bool GenOnly)
{
// Try to read the control information out of the DB.
if ((CurStat.Flags & FlMD5) == FlMD5)
@@ -212,28 +330,88 @@ bool CacheDB::GetMD5(string &MD5Res,bool GenOnly)
if (GenOnly == true)
return true;
- InitQuery("m5");
- if (Get() == true)
- {
- MD5Res = string((char *)Data.data,Data.size);
+ MD5Res = bytes2hex(CurStat.MD5, sizeof(CurStat.MD5));
return true;
}
- CurStat.Flags &= ~FlMD5;
- }
- Stats.MD5Bytes += FileStat.st_size;
+ Stats.MD5Bytes += CurStat.FileSize;
+ if (Fd == NULL && OpenFile() == false)
+ {
+ return false;
+ }
MD5Summation MD5;
- if (Fd->Seek(0) == false || MD5.AddFD(Fd->Fd(),FileStat.st_size) == false)
+ if (Fd->Seek(0) == false || MD5.AddFD(Fd->Fd(),CurStat.FileSize) == false)
return false;
MD5Res = MD5.Result();
- InitQuery("m5");
- if (Put(MD5Res.c_str(),MD5Res.length()) == true)
+ hex2bytes(CurStat.MD5, MD5Res.data(), sizeof(CurStat.MD5));
CurStat.Flags |= FlMD5;
return true;
}
/*}}}*/
+// CacheDB::GetSHA1 - Get the SHA1 hash /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool CacheDB::GetSHA1(bool GenOnly)
+{
+ // Try to read the control information out of the DB.
+ if ((CurStat.Flags & FlSHA1) == FlSHA1)
+ {
+ if (GenOnly == true)
+ return true;
+
+ SHA1Res = bytes2hex(CurStat.SHA1, sizeof(CurStat.SHA1));
+ return true;
+ }
+
+ Stats.SHA1Bytes += CurStat.FileSize;
+
+ if (Fd == NULL && OpenFile() == false)
+ {
+ return false;
+ }
+ SHA1Summation SHA1;
+ if (Fd->Seek(0) == false || SHA1.AddFD(Fd->Fd(),CurStat.FileSize) == false)
+ return false;
+
+ SHA1Res = SHA1.Result();
+ hex2bytes(CurStat.SHA1, SHA1Res.data(), sizeof(CurStat.SHA1));
+ CurStat.Flags |= FlSHA1;
+ return true;
+}
+ /*}}}*/
+// CacheDB::GetSHA256 - Get the SHA256 hash /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+bool CacheDB::GetSHA256(bool GenOnly)
+{
+ // Try to read the control information out of the DB.
+ if ((CurStat.Flags & FlSHA256) == FlSHA256)
+ {
+ if (GenOnly == true)
+ return true;
+
+ SHA256Res = bytes2hex(CurStat.SHA256, sizeof(CurStat.SHA256));
+ return true;
+ }
+
+ Stats.SHA256Bytes += CurStat.FileSize;
+
+ if (Fd == NULL && OpenFile() == false)
+ {
+ return false;
+ }
+ SHA256Summation SHA256;
+ if (Fd->Seek(0) == false || SHA256.AddFD(Fd->Fd(),CurStat.FileSize) == false)
+ return false;
+
+ SHA256Res = SHA256.Result();
+ hex2bytes(CurStat.SHA256, SHA256Res.data(), sizeof(CurStat.SHA256));
+ CurStat.Flags |= FlSHA256;
+ return true;
+}
+ /*}}}*/
// CacheDB::Finish - Write back the cache structure /*{{{*/
// ---------------------------------------------------------------------
/* */
@@ -246,9 +424,12 @@ bool CacheDB::Finish()
// Write the stat information
CurStat.Flags = htonl(CurStat.Flags);
+ CurStat.FileSize = htonl(CurStat.FileSize);
InitQuery("st");
Put(&CurStat,sizeof(CurStat));
CurStat.Flags = ntohl(CurStat.Flags);
+ CurStat.FileSize = ntohl(CurStat.FileSize);
+
return true;
}
/*}}}*/
@@ -278,7 +459,6 @@ bool CacheDB::Clean()
{
if (stringcmp((char *)Key.data,Colon,"st") == 0 ||
stringcmp((char *)Key.data,Colon,"cn") == 0 ||
- stringcmp((char *)Key.data,Colon,"m5") == 0 ||
stringcmp((char *)Key.data,Colon,"cl") == 0)
{
if (FileExists(string(Colon+1,(const char *)Key.data+Key.size)) == true)
diff --git a/ftparchive/cachedb.h b/ftparchive/cachedb.h
index 1b043e1aa..afa22213a 100644
--- a/ftparchive/cachedb.h
+++ b/ftparchive/cachedb.h
@@ -44,7 +44,7 @@ class CacheDB
memset(&Key,0,sizeof(Key));
memset(&Data,0,sizeof(Data));
Key.data = TmpKey;
- Key.size = snprintf(TmpKey,sizeof(TmpKey),"%s:%s",Type,FileName.c_str());
+ Key.size = snprintf(TmpKey,sizeof(TmpKey),"%s:%s",FileName.c_str(), Type);
}
inline bool Get()
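Note the key layout reversal here: cache records were keyed as "<type>:<filename>" and are now keyed as "<filename>:<type>", so with the DB_BTREE switch above all records belonging to one archive end up adjacent in the sorted key space. Illustrative keys (the path is made up):

   old:  "st:pool/main/a/apt_0.6.44_i386.deb"
   new:  "pool/main/a/apt_0.6.44_i386.deb:st"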
@@ -64,19 +64,31 @@ class CacheDB
}
return true;
}
+ bool OpenFile();
+ bool GetFileStat();
+ bool GetCurStat();
+ bool LoadControl();
+ bool LoadContents(bool GenOnly);
+ bool GetMD5(bool GenOnly);
+ bool GetSHA1(bool GenOnly);
+ bool GetSHA256(bool GenOnly);
// Stat info stored in the DB, Fixed types since it is written to disk.
- enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2)};
+ enum FlagList {FlControl = (1<<0),FlMD5=(1<<1),FlContents=(1<<2),
+ FlSize=(1<<3), FlSHA1=(1<<4), FlSHA256=(1<<5)};
struct StatStore
{
- time_t mtime;
uint32_t Flags;
+ uint32_t mtime;
+ uint32_t FileSize;
+ uint8_t MD5[16];
+ uint8_t SHA1[20];
+ uint8_t SHA256[32];
} CurStat;
struct StatStore OldStat;
// 'set' state
string FileName;
- struct stat FileStat;
FileFd *Fd;
debDebFile *DebFile;
@@ -85,34 +97,42 @@ class CacheDB
// Data collection helpers
debDebFile::MemControlExtract Control;
ContentsExtract Contents;
+ string MD5Res;
+ string SHA1Res;
+ string SHA256Res;
// Runtime statistics
struct Stats
{
double Bytes;
double MD5Bytes;
+ double SHA1Bytes;
+ double SHA256Bytes;
unsigned long Packages;
unsigned long Misses;
unsigned long DeLinkBytes;
- inline void Add(const Stats &S) {Bytes += S.Bytes; MD5Bytes += S.MD5Bytes;
+ inline void Add(const Stats &S) {
+ Bytes += S.Bytes; MD5Bytes += S.MD5Bytes; SHA1Bytes += S.SHA1Bytes;
+ SHA256Bytes += S.SHA256Bytes;
Packages += S.Packages; Misses += S.Misses; DeLinkBytes += S.DeLinkBytes;};
- Stats() : Bytes(0), MD5Bytes(0), Packages(0), Misses(0), DeLinkBytes(0) {};
+ Stats() : Bytes(0), MD5Bytes(0), SHA1Bytes(0), SHA256Bytes(0), Packages(0), Misses(0), DeLinkBytes(0) {};
} Stats;
bool ReadyDB(string DB);
inline bool DBFailed() {return Dbp != 0 && DBLoaded == false;};
inline bool Loaded() {return DBLoaded == true;};
+ inline off_t GetFileSize(void) {return CurStat.FileSize;}
+
bool SetFile(string FileName,struct stat St,FileFd *Fd);
- bool LoadControl();
- bool LoadContents(bool GenOnly);
- bool GetMD5(string &MD5Res,bool GenOnly);
+ bool GetFileInfo(string FileName, bool DoControl, bool DoContents,
+ bool GenContentsOnly, bool DoMD5, bool DoSHA1, bool DoSHA256);
bool Finish();
bool Clean();
- CacheDB(string DB) : Dbp(0), DebFile(0) {ReadyDB(DB);};
+ CacheDB(string DB) : Dbp(0), Fd(NULL), DebFile(0) {ReadyDB(DB);};
~CacheDB() {ReadyDB(string()); delete DebFile;};
};
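The Fl* bits track which StatStore fields currently hold valid data; that is how the Get*(GenOnly) methods above decide whether a cached value can be reused. As a sketch only, a member could test whether a record already carries everything a Packages run needs like this:

   uint32_t Want = FlSize | FlMD5 | FlSHA1 | FlSHA256;
   bool UpToDate = (CurStat.Flags & Want) == Want;   // Flags already converted with ntohl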
diff --git a/ftparchive/writer.cc b/ftparchive/writer.cc
index fc9ea27d7..ea242d6af 100644
--- a/ftparchive/writer.cc
+++ b/ftparchive/writer.cc
@@ -23,6 +23,7 @@
#include <apt-pkg/configuration.h>
#include <apt-pkg/md5.h>
#include <apt-pkg/sha1.h>
+#include <apt-pkg/sha256.h>
#include <apt-pkg/deblistparser.h>
#include <sys/types.h>
@@ -70,7 +71,7 @@ FTWScanner::FTWScanner()
// ---------------------------------------------------------------------
/* This is the FTW scanner, it processes each directory element in the
directory tree. */
-int FTWScanner::Scanner(const char *File,const struct stat *sb,int Flag)
+int FTWScanner::ScannerFTW(const char *File,const struct stat *sb,int Flag)
{
if (Flag == FTW_DNR)
{
@@ -85,6 +86,14 @@ int FTWScanner::Scanner(const char *File,const struct stat *sb,int Flag)
if (Flag != FTW_F)
return 0;
+ return ScannerFile(File, true);
+}
+ /*}}}*/
+// FTWScanner::ScannerFile - File Scanner /*{{{*/
+// ---------------------------------------------------------------------
+/* */
+int FTWScanner::ScannerFile(const char *File, bool ReadLink)
+{
const char *LastComponent = strrchr(File, '/');
if (LastComponent == NULL)
LastComponent = File;
@@ -105,7 +114,8 @@ int FTWScanner::Scanner(const char *File,const struct stat *sb,int Flag)
given are not links themselves. */
char Jnk[2];
Owner->OriginalPath = File;
- if (Owner->RealPath != 0 && readlink(File,Jnk,sizeof(Jnk)) != -1 &&
+ if (ReadLink && Owner->RealPath != 0 &&
+ readlink(File,Jnk,sizeof(Jnk)) != -1 &&
realpath(File,Owner->RealPath) != 0)
Owner->DoPackage(Owner->RealPath);
else
@@ -154,7 +164,7 @@ bool FTWScanner::RecursiveScan(string Dir)
// Do recursive directory searching
Owner = this;
- int Res = ftw(Dir.c_str(),Scanner,30);
+ int Res = ftw(Dir.c_str(),ScannerFTW,30);
// Error treewalking?
if (Res != 0)
@@ -209,12 +219,14 @@ bool FTWScanner::LoadFileList(string Dir,string File)
FileName = Line;
}
+#if 0
struct stat St;
int Flag = FTW_F;
if (stat(FileName,&St) != 0)
Flag = FTW_NS;
+#endif
- if (Scanner(FileName,&St,Flag) != 0)
+ if (ScannerFile(FileName, false) != 0)
break;
}
@@ -227,7 +239,7 @@ bool FTWScanner::LoadFileList(string Dir,string File)
/* */
bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
unsigned long &DeLinkBytes,
- struct stat &St)
+ off_t FileSize)
{
// See if this isn't an internaly prefix'd file name.
if (InternalPrefix.empty() == false &&
@@ -243,7 +255,7 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
NewLine(1);
ioprintf(c1out, _(" DeLink %s [%s]\n"), (OriginalPath + InternalPrefix.length()),
- SizeToStr(St.st_size).c_str());
+ SizeToStr(FileSize).c_str());
c1out << flush;
if (NoLinkAct == false)
@@ -269,7 +281,7 @@ bool FTWScanner::Delink(string &FileName,const char *OriginalPath,
}
}
- DeLinkBytes += St.st_size;
+ DeLinkBytes += FileSize;
if (DeLinkBytes/1024 >= DeLinkLimit)
ioprintf(c1out, _(" DeLink limit of %sB hit.\n"), SizeToStr(DeLinkBytes).c_str());
}
@@ -295,6 +307,8 @@ PackagesWriter::PackagesWriter(string DB,string Overrides,string ExtOverrides,
// Process the command line options
DoMD5 = _config->FindB("APT::FTPArchive::MD5",true);
+ DoSHA1 = _config->FindB("APT::FTPArchive::SHA1",true);
+ DoSHA256 = _config->FindB("APT::FTPArchive::SHA256",true);
DoContents = _config->FindB("APT::FTPArchive::Contents",true);
NoOverride = _config->FindB("APT::FTPArchive::NoOverrideMsg",false);
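Both new checksums default to on, mirroring the existing MD5 switch; they can be disabled through the options read here, for example in apt.conf syntax (illustrative):

   APT::FTPArchive::SHA1 "false";
   APT::FTPArchive::SHA256 "false";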
@@ -343,29 +357,19 @@ bool FTWScanner::SetExts(string Vals)
// PackagesWriter::DoPackage - Process a single package /*{{{*/
// ---------------------------------------------------------------------
/* This method takes a package and gets its control information and
- MD5 then writes out a control record with the proper fields rewritten
- and the path/size/hash appended. */
+ MD5, SHA1 and SHA256 then writes out a control record with the proper fields
+ rewritten and the path/size/hash appended. */
bool PackagesWriter::DoPackage(string FileName)
{
- // Open the archive
- FileFd F(FileName,FileFd::ReadOnly);
- if (_error->PendingError() == true)
- return false;
-
- // Stat the file for later
- struct stat St;
- if (fstat(F.Fd(),&St) != 0)
- return _error->Errno("fstat",_("Failed to stat %s"),FileName.c_str());
-
// Pull all the data we need form the DB
- string MD5Res;
- if (Db.SetFile(FileName,St,&F) == false ||
- Db.LoadControl() == false ||
- (DoContents == true && Db.LoadContents(true) == false) ||
- (DoMD5 == true && Db.GetMD5(MD5Res,false) == false))
+ if (Db.GetFileInfo(FileName, true, DoContents, true, DoMD5, DoSHA1, DoSHA256)
+ == false)
+ {
return false;
+ }
- if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,St) == false)
+ off_t FileSize = Db.GetFileSize();
+ if (Delink(FileName,OriginalPath,Stats.DeLinkBytes,FileSize) == false)
return false;
// Lookup the overide information
@@ -400,7 +404,7 @@ bool PackagesWriter::DoPackage(string FileName)
}
char Size[40];
- sprintf(Size,"%lu",St.st_size);
+ sprintf(Size,"%lu", (unsigned long) FileSize);
// Strip the DirStrip prefix from the FileName and add the PathPrefix
string NewFileName;
@@ -420,7 +424,9 @@ bool PackagesWriter::DoPackage(string FileName)
unsigned int End = 0;
SetTFRewriteData(Changes[End++], "Size", Size);
- SetTFRewriteData(Changes[End++], "MD5sum", MD5Res.c_str());
+ SetTFRewriteData(Changes[End++], "MD5sum", Db.MD5Res.c_str());
+ SetTFRewriteData(Changes[End++], "SHA1", Db.SHA1Res.c_str());
+ SetTFRewriteData(Changes[End++], "SHA256", Db.SHA256Res.c_str());
SetTFRewriteData(Changes[End++], "Filename", NewFileName.c_str());
SetTFRewriteData(Changes[End++], "Priority", OverItem->Priority.c_str());
SetTFRewriteData(Changes[End++], "Status", 0);
@@ -491,6 +497,10 @@ SourcesWriter::SourcesWriter(string BOverrides,string SOverrides,
else
NoOverride = true;
+ // WTF?? The logic above: if we can't read binary overrides, don't even try
+ // reading source overrides. if we can read binary overrides, then say there
+ // are no overrides. THIS MAKES NO SENSE! -- ajt@d.o, 2006/02/28
+
if (ExtOverrides.empty() == false)
SOver.ReadExtraOverride(ExtOverrides);
@@ -607,12 +617,14 @@ bool SourcesWriter::DoPackage(string FileName)
}
auto_ptr<Override::Item> SOverItem(SOver.GetItem(Tags.FindS("Source")));
- const auto_ptr<Override::Item> autoSOverItem(SOverItem);
+ // const auto_ptr<Override::Item> autoSOverItem(SOverItem);
if (SOverItem.get() == 0)
{
+ ioprintf(c1out, _(" %s has no source override entry\n"), Tags.FindS("Source").c_str());
SOverItem = auto_ptr<Override::Item>(BOver.GetItem(Tags.FindS("Source")));
if (SOverItem.get() == 0)
{
+ ioprintf(c1out, _(" %s has no binary override entry either\n"), Tags.FindS("Source").c_str());
SOverItem = auto_ptr<Override::Item>(new Override::Item);
*SOverItem = *OverItem;
}
@@ -657,7 +669,7 @@ bool SourcesWriter::DoPackage(string FileName)
realpath(OriginalPath.c_str(),RealPath) != 0)
{
string RP = RealPath;
- if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St) == false)
+ if (Delink(RP,OriginalPath.c_str(),Stats.DeLinkBytes,St.st_size) == false)
return false;
}
}
@@ -727,26 +739,14 @@ ContentsWriter::ContentsWriter(string DB) :
determine what the package name is. */
bool ContentsWriter::DoPackage(string FileName,string Package)
{
- // Open the archive
- FileFd F(FileName,FileFd::ReadOnly);
- if (_error->PendingError() == true)
- return false;
-
- // Stat the file for later
- struct stat St;
- if (fstat(F.Fd(),&St) != 0)
- return _error->Errno("fstat","Failed too stat %s",FileName.c_str());
-
- // Ready the DB
- if (Db.SetFile(FileName,St,&F) == false ||
- Db.LoadContents(false) == false)
+ if (!Db.GetFileInfo(FileName, Package.empty(), true, false, false, false, false))
+ {
return false;
+ }
// Parse the package name
if (Package.empty() == true)
{
- if (Db.LoadControl() == false)
- return false;
Package = Db.Control.Section.FindS("Package");
}
@@ -896,6 +896,11 @@ bool ReleaseWriter::DoPackage(string FileName)
SHA1.AddFD(fd.Fd(), fd.Size());
CheckSums[NewFileName].SHA1 = SHA1.Result();
+ fd.Seek(0);
+ SHA256Summation SHA256;
+ SHA256.AddFD(fd.Fd(), fd.Size());
+ CheckSums[NewFileName].SHA256 = SHA256.Result();
+
fd.Close();
return true;
@@ -927,5 +932,16 @@ void ReleaseWriter::Finish()
(*I).second.size,
(*I).first.c_str());
}
+
+ fprintf(Output, "SHA256:\n");
+ for(map<string,struct CheckSum>::iterator I = CheckSums.begin();
+ I != CheckSums.end();
+ ++I)
+ {
+ fprintf(Output, " %s %16ld %s\n",
+ (*I).second.SHA256.c_str(),
+ (*I).second.size,
+ (*I).first.c_str());
+ }
}
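With this, the generated Release file gains a SHA256 section next to the existing MD5Sum and SHA1 ones, using the same " %s %16ld %s" line format. Schematically (digests, sizes and file names are placeholders):

   SHA256:
    <64-hex-character digest>            20470 main/binary-i386/Packages
    <64-hex-character digest>             6531 main/binary-i386/Packages.gz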
diff --git a/ftparchive/writer.h b/ftparchive/writer.h
index 16d014ef8..1d47d57ec 100644
--- a/ftparchive/writer.h
+++ b/ftparchive/writer.h
@@ -45,10 +45,11 @@ class FTWScanner
bool NoLinkAct;
static FTWScanner *Owner;
- static int Scanner(const char *File,const struct stat *sb,int Flag);
+ static int ScannerFTW(const char *File,const struct stat *sb,int Flag);
+ static int ScannerFile(const char *File, bool ReadLink);
bool Delink(string &FileName,const char *OriginalPath,
- unsigned long &Bytes,struct stat &St);
+ unsigned long &Bytes,off_t FileSize);
inline void NewLine(unsigned Priority)
{
@@ -84,6 +85,8 @@ class PackagesWriter : public FTWScanner
// Some flags
bool DoMD5;
+ bool DoSHA1;
+ bool DoSHA256;
bool NoOverride;
bool DoContents;
@@ -170,6 +173,7 @@ protected:
{
string MD5;
string SHA1;
+ string SHA256;
// Limited by FileFd::Size()
unsigned long size;
~CheckSum() {};