author     David Kalnischkies <david@kalnischkies.de>   2015-04-11 10:23:52 +0200
committer  David Kalnischkies <david@kalnischkies.de>   2015-04-19 01:13:09 +0200
commit     34faa8f7ae2526f46cd1f84bb6962ad06d841e5e
tree       244f91967a9e5bb44677589a7245298f2ecefca7 /methods/https.cc
parent     b55ec4203f7a99d380903911f8839aba2a65e27e
download   apt-34faa8f7ae2526f46cd1f84bb6962ad06d841e5e.tar.gz
calculate hashes while downloading in https
We do this in HTTP already to give the CPU some exercise while the disk is heavily spinning (or flashing?) to store the data, avoiding the need to reread the entire file later on to calculate the hashes. That second pass happened outside the eyes of progress reporting, so you could end up with a bunch of https workers 'stuck' at 100% while they were busy calculating hashes. This is a bummer for everyone using apt as a connection speedtest, as the https method now appears slower (it isn't really; it just no longer reports done too early).
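
In miniature, the trick looks like the sketch below. This is not apt's code: the FNV-1a accumulator is a tiny hypothetical stand-in for apt's Hashes class, and the URL and file name are made up; only the libcurl calls are real. The point is that the write callback both stores and hashes each chunk, so the checksum is finished the instant the transfer is.

#include <cstdint>
#include <cstdio>
#include <curl/curl.h>

struct Download
{
   FILE *out;                                   // destination file on disk
   uint64_t fnv = 14695981039346656037ull;      // incremental FNV-1a state (stand-in for real digests)
};

// curl hands every received chunk to this callback: hash it while it is
// still hot in the cache, then write it out. Returning a short count
// makes curl abort the transfer.
static size_t write_cb(char *data, size_t size, size_t nmemb, void *userp)
{
   Download *dl = static_cast<Download *>(userp);
   size_t const bytes = size * nmemb;
   for (size_t i = 0; i < bytes; ++i)
      dl->fnv = (dl->fnv ^ static_cast<unsigned char>(data[i])) * 1099511628211ull;
   return fwrite(data, 1, bytes, dl->out);
}

int main()
{
   Download dl;
   dl.out = fopen("download.bin", "wb");        // made-up destination
   if (dl.out == nullptr)
      return 1;
   curl_global_init(CURL_GLOBAL_DEFAULT);
   CURL *curl = curl_easy_init();
   if (curl == nullptr)
      return 1;
   curl_easy_setopt(curl, CURLOPT_URL, "https://example.org/some.deb");
   curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_cb);
   curl_easy_setopt(curl, CURLOPT_WRITEDATA, &dl);
   CURLcode const rc = curl_easy_perform(curl);
   fclose(dl.out);
   curl_easy_cleanup(curl);
   curl_global_cleanup();
   if (rc == CURLE_OK)
      printf("done, hash %016llx computed during the download\n",
             (unsigned long long)dl.fnv);
   return rc == CURLE_OK ? 0 : 1;
}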
Diffstat (limited to 'methods/https.cc')
-rw-r--r--  methods/https.cc | 38
1 file changed, 26 insertions(+), 12 deletions(-)
diff --git a/methods/https.cc b/methods/https.cc
index 81903b239..c6b75d9ad 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -72,18 +72,18 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
       else
          me->https->Server->StartPos = 0;
 
-      me->https->File->Truncate(me->https->Server->StartPos);
-      me->https->File->Seek(me->https->Server->StartPos);
-
       me->Res->LastModified = me->https->Server->Date;
       me->Res->Size = me->https->Server->Size;
       me->Res->ResumePoint = me->https->Server->StartPos;
       // we expect valid data, so tell our caller we get the file now
-      if (me->https->Server->Result >= 200 && me->https->Server->Result < 300 &&
-          me->https->Server->JunkSize == 0 &&
-          me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
-         me->https->URIStart(*me->Res);
+      if (me->https->Server->Result >= 200 && me->https->Server->Result < 300)
+      {
+         if (me->https->Server->JunkSize == 0 && me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
+            me->https->URIStart(*me->Res);
+         if (me->https->Server->AddPartialFileToHashes(*(me->https->File)) == false)
+            return 0;
+      }
    }
    else if (me->https->Server->HeaderLine(line) == false)
       return 0;
@@ -116,16 +116,31 @@ HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
       }
    }
 
+   if (me->Server->GetHashes()->Add((unsigned char const * const)buffer, buffer_size) == false)
+      return 0;
+
    return buffer_size;
 }
 // HttpsServerState::HttpsServerState - Constructor                    /*{{{*/
-HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner)
+HttpsServerState::HttpsServerState(URI Srv,HttpsMethod * Owner) : ServerState(Srv, Owner), Hash(NULL)
 {
    TimeOut = _config->FindI("Acquire::https::Timeout",TimeOut);
    Reset();
 }
                                                                         /*}}}*/
+bool HttpsServerState::InitHashes(HashStringList const &ExpectedHashes)        /*{{{*/
+{
+   delete Hash;
+   Hash = new Hashes(ExpectedHashes);
+   return true;
+}
+                                                                        /*}}}*/
+APT_PURE Hashes * HttpsServerState::GetHashes()                         /*{{{*/
+{
+   return Hash;
+}
+                                                                        /*}}}*/
 
 void HttpsMethod::SetupProxy()                                          /*{{{*/
 {
@@ -365,6 +380,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    // go for it - if the file exists, append on it
    File = new FileFd(Itm->DestFile, FileFd::WriteAny);
    Server = CreateServerState(Itm->Uri);
+   if (Server->InitHashes(Itm->ExpectedHashes) == false)
+      return false;
 
    // keep apt updated
    Res.Filename = Itm->DestFile;
@@ -443,10 +460,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
    Res.LastModified = resultStat.st_mtime;
 
    // take hashes
-   Hashes Hash(Itm->ExpectedHashes);
-   FileFd Fd(Res.Filename, FileFd::ReadOnly);
-   Hash.AddFD(Fd);
-   Res.TakeHashes(Hash);
+   Res.TakeHashes(*(Server->GetHashes()));
 
    // keep apt updated
    URIDone(Res);
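
Pieced together, the lifecycle the patch establishes reads roughly like the compilable miniature below. These are hypothetical stub types, not apt's real ServerState/Hashes classes, and a plain byte counter stands in for real digest state; only the call order mirrors the diff: allocate the hash state before the transfer, feed it chunk by chunk from the write callback, and hand over the finished sums afterwards instead of re-reading the file.

#include <cstddef>
#include <cstdio>

struct Hashes                         // stand-in for apt's Hashes
{
   size_t bytes = 0;                  // a real one would update MD5/SHA* here
   bool Add(unsigned char const *, size_t const len) { bytes += len; return true; }
};

struct ServerState                    // stand-in for HttpsServerState
{
   Hashes *Hash = nullptr;
   bool InitHashes() { delete Hash; Hash = new Hashes(); return true; }
   Hashes *GetHashes() { return Hash; }
   ~ServerState() { delete Hash; }
};

int main()
{
   ServerState server;
   if (server.InitHashes() == false)           // before the transfer starts
      return 1;
   unsigned char chunk[4096] = {};             // pretend curl delivered these
   for (int i = 0; i < 3; ++i)                 // what write_data() does per chunk
      if (server.GetHashes()->Add(chunk, sizeof(chunk)) == false)
         return 1;
   // after the transfer: take the finished sums, no second pass over the file
   printf("hashed %zu bytes during the download\n", server.GetHashes()->bytes);
   return 0;
}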