author     David Kalnischkies <david@kalnischkies.de>   2016-08-02 14:49:58 +0200
committer  David Kalnischkies <david@kalnischkies.de>   2016-08-10 23:19:44 +0200
commit     4bba5a88d0f6afde4414b586b64c48a4851d5324 (patch)
tree       458faf656cc61045c5f1f1a3443c2110c3514d55
parent     57401c48fadc0c78733a67294f9cc20a57e527c9 (diff)
download   apt-4bba5a88d0f6afde4414b586b64c48a4851d5324.tar.gz
use the same redirection handling for http and https
cURL, which backs our https implementation, can handle redirects on its own, but by dealing with them ourselves we gain finer control over which redirections are performed (we don't like https → http) and by whom, so that redirections to other hosts correctly spawn a new https method to deal with them instead of letting the current one handle it.
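
The policy that replaces libcurl's redirect handling is the check in ServerMethod::DealWithHeaders (see methods/server.cc below): redirects within the same access method and upgrades from http to https are followed, anything else (notably https → http) is refused. A minimal standalone sketch of that decision; the helper name is hypothetical and not apt's API:

   #include <iostream>
   #include <string>

   // Hypothetical helper mirroring the policy in ServerMethod::DealWithHeaders:
   // follow redirects that stay within the same access method, allow upgrades
   // from http (or https+http) to https, and forbid everything else - in
   // particular the https -> http downgrade this commit is about.
   static bool IsRedirectAllowed(std::string const &from, std::string const &to)
   {
      if (from == to)
	 return true;                // e.g. http -> http, https -> https
      if ((from == "http" || from == "https+http") && to == "https")
	 return true;                // upgrading to https is fine
      return false;                   // e.g. https -> http is forbidden
   }

   int main()
   {
      std::cout << IsRedirectAllowed("http", "https") << '\n';   // 1 (allowed)
      std::cout << IsRedirectAllowed("https", "http") << '\n';   // 0 (forbidden)
   }
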
-rw-r--r--  methods/http.cc                              |  29
-rw-r--r--  methods/http.h                               |   1
-rw-r--r--  methods/https.cc                             | 106
-rw-r--r--  methods/server.cc                            |  56
-rw-r--r--  methods/server.h                             |   2
-rwxr-xr-x  test/integration/test-apt-https-no-redirect  |   6
-rwxr-xr-x  test/integration/test-partial-file-support   |   3
7 files changed, 102 insertions, 101 deletions
diff --git a/methods/http.cc b/methods/http.cc
index cf5eae06d..b7a3aa4a1 100644
--- a/methods/http.cc
+++ b/methods/http.cc
@@ -797,3 +797,32 @@ void HttpMethod::RotateDNS() /*{{{*/
::RotateDNS();
}
/*}}}*/
+ServerMethod::DealWithHeadersResult HttpMethod::DealWithHeaders(FetchResult &Res)/*{{{*/
+{
+ auto ret = ServerMethod::DealWithHeaders(Res);
+ if (ret != ServerMethod::FILE_IS_OPEN)
+ return ret;
+
+ // Open the file
+ delete File;
+ File = new FileFd(Queue->DestFile,FileFd::WriteAny);
+ if (_error->PendingError() == true)
+ return ERROR_NOT_FROM_SERVER;
+
+ FailFile = Queue->DestFile;
+ FailFile.c_str(); // Make sure we don't do a malloc in the signal handler
+ FailFd = File->Fd();
+ FailTime = Server->Date;
+
+ if (Server->InitHashes(Queue->ExpectedHashes) == false || Server->AddPartialFileToHashes(*File) == false)
+ {
+ _error->Errno("read",_("Problem hashing file"));
+ return ERROR_NOT_FROM_SERVER;
+ }
+ if (Server->StartPos > 0)
+ Res.ResumePoint = Server->StartPos;
+
+ SetNonBlock(File->Fd(),true);
+ return FILE_IS_OPEN;
+}
+ /*}}}*/
diff --git a/methods/http.h b/methods/http.h
index 2ac3b8c9b..e6b11d642 100644
--- a/methods/http.h
+++ b/methods/http.h
@@ -130,6 +130,7 @@ class HttpMethod : public ServerMethod
virtual std::unique_ptr<ServerState> CreateServerState(URI const &uri) APT_OVERRIDE;
virtual void RotateDNS() APT_OVERRIDE;
+ virtual DealWithHeadersResult DealWithHeaders(FetchResult &Res) APT_OVERRIDE;
protected:
std::string AutoDetectProxyCmd;
diff --git a/methods/https.cc b/methods/https.cc
index 7c0c3241d..a5a8a7cd1 100644
--- a/methods/https.cc
+++ b/methods/https.cc
@@ -60,6 +60,7 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
if (line.empty() == true)
{
+ me->https->Server->JunkSize = 0;
if (me->https->Server->Result != 416 && me->https->Server->StartPos != 0)
;
else if (me->https->Server->Result == 416)
@@ -99,11 +100,13 @@ HttpsMethod::parse_header(void *buffer, size_t size, size_t nmemb, void *userp)
// we expect valid data, so tell our caller we get the file now
if (me->https->Server->Result >= 200 && me->https->Server->Result < 300)
{
- if (me->https->Server->JunkSize == 0 && me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
+ if (me->Res->Size != 0 && me->Res->Size > me->Res->ResumePoint)
me->https->URIStart(*me->Res);
if (me->https->Server->AddPartialFileToHashes(*(me->https->File)) == false)
return 0;
}
+ else
+ me->https->Server->JunkSize = std::numeric_limits<decltype(me->https->Server->JunkSize)>::max();
}
else if (me->https->Server->HeaderLine(line) == false)
return 0;
@@ -266,6 +269,7 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// options
curl_easy_setopt(curl, CURLOPT_NOPROGRESS, true);
curl_easy_setopt(curl, CURLOPT_FILETIME, true);
+ curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 0);
// only allow curl to handle https, not the other stuff it supports
curl_easy_setopt(curl, CURLOPT_PROTOCOLS, CURLPROTO_HTTPS);
curl_easy_setopt(curl, CURLOPT_REDIR_PROTOCOLS, CURLPROTO_HTTPS);
@@ -372,10 +376,6 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
curl_easy_setopt(curl, CURLOPT_LOW_SPEED_LIMIT, DL_MIN_SPEED);
curl_easy_setopt(curl, CURLOPT_LOW_SPEED_TIME, timeout);
- // set redirect options and default to 10 redirects
- curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, AllowRedirect);
- curl_easy_setopt(curl, CURLOPT_MAXREDIRS, 10);
-
// debug
if (Debug == true)
curl_easy_setopt(curl, CURLOPT_VERBOSE, true);
@@ -428,6 +428,8 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
// met, CURLINFO_CONDITION_UNMET will be set to 1
long curl_condition_unmet = 0;
curl_easy_getinfo(curl, CURLINFO_CONDITION_UNMET, &curl_condition_unmet);
+ if (curl_condition_unmet == 1)
+ Server->Result = 304;
File->Close();
curl_slist_free_all(headers);
@@ -460,70 +462,54 @@ bool HttpsMethod::Fetch(FetchItem *Itm)
return false;
}
- // server says file not modified
- if (Server->Result == 304 || curl_condition_unmet == 1)
+ switch (DealWithHeaders(Res))
{
- RemoveFile("https", File->Name());
- Res.IMSHit = true;
- Res.LastModified = Itm->LastModified;
- Res.Size = 0;
- URIDone(Res);
- return true;
- }
- Res.IMSHit = false;
+ case ServerMethod::IMS_HIT:
+ URIDone(Res);
+ break;
- if (Server->Result != 200 && // OK
- Server->Result != 206 && // Partial
- Server->Result != 416) // invalid Range
- {
- char err[255];
- snprintf(err, sizeof(err) - 1, "HttpError%i", Server->Result);
- SetFailReason(err);
- _error->Error("%i %s", Server->Result, Server->Code);
- // unlink, no need keep 401/404 page content in partial/
- RemoveFile("https", File->Name());
- return false;
- }
+ case ServerMethod::ERROR_WITH_CONTENT_PAGE:
+ // unlink, no need to keep 401/404 page content in partial/
+ RemoveFile(Binary.c_str(), File->Name());
+ case ServerMethod::ERROR_UNRECOVERABLE:
+ case ServerMethod::ERROR_NOT_FROM_SERVER:
+ return false;
- // invalid range-request
- if (Server->Result == 416)
- {
- RemoveFile("https", File->Name());
- delete File;
- Redirect(Itm->Uri);
- return true;
- }
+ case ServerMethod::TRY_AGAIN_OR_REDIRECT:
+ Redirect(NextURI);
+ break;
- struct stat resultStat;
- if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
- {
- _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
- return false;
- }
- Res.Size = resultStat.st_size;
+ case ServerMethod::FILE_IS_OPEN:
+ struct stat resultStat;
+ if (unlikely(stat(File->Name().c_str(), &resultStat) != 0))
+ {
+ _error->Errno("stat", "Unable to access file %s", File->Name().c_str());
+ return false;
+ }
+ Res.Size = resultStat.st_size;
- // Timestamp
- curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified);
- if (Res.LastModified != -1)
- {
- struct timeval times[2];
- times[0].tv_sec = Res.LastModified;
- times[1].tv_sec = Res.LastModified;
- times[0].tv_usec = times[1].tv_usec = 0;
- utimes(File->Name().c_str(), times);
- }
- else
- Res.LastModified = resultStat.st_mtime;
+ // Timestamp
+ curl_easy_getinfo(curl, CURLINFO_FILETIME, &Res.LastModified);
+ if (Res.LastModified != -1)
+ {
+ struct timeval times[2];
+ times[0].tv_sec = Res.LastModified;
+ times[1].tv_sec = Res.LastModified;
+ times[0].tv_usec = times[1].tv_usec = 0;
+ utimes(File->Name().c_str(), times);
+ }
+ else
+ Res.LastModified = resultStat.st_mtime;
- // take hashes
- Res.TakeHashes(*(Server->GetHashes()));
+ // take hashes
+ Res.TakeHashes(*(Server->GetHashes()));
- // keep apt updated
- URIDone(Res);
+ // keep apt updated
+ URIDone(Res);
+ break;
+ }
- // cleanup
delete File;
-
return true;
}
/*}}}*/
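
With the hunks above applied, libcurl never follows a Location header itself: CURLOPT_FOLLOWLOCATION is pinned to 0 and any 3xx response surfaces to the method, which routes it through DealWithHeaders just like the http method does. A minimal sketch of that curl-side setup; setup() is not an apt function and error handling is omitted:

   #include <curl/curl.h>

   // Sketch of the post-commit curl configuration: redirects are never
   // followed internally, so a 3xx answer is handed back to the caller,
   // which applies its own redirect policy (or refuses the redirect).
   static void setup(CURL *curl, char const *url)
   {
      curl_easy_setopt(curl, CURLOPT_URL, url);
      curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 0L);
      // only allow curl to talk https, also for any redirect target it
      // would otherwise follow (none, given FOLLOWLOCATION is off)
      curl_easy_setopt(curl, CURLOPT_PROTOCOLS, CURLPROTO_HTTPS);
      curl_easy_setopt(curl, CURLOPT_REDIR_PROTOCOLS, CURLPROTO_HTTPS);
   }
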
diff --git a/methods/server.cc b/methods/server.cc
index 672a3d16d..66551b893 100644
--- a/methods/server.cc
+++ b/methods/server.cc
@@ -170,7 +170,7 @@ bool ServerState::HeaderLine(string Line)
HaveContent = true;
unsigned long long * DownloadSizePtr = &DownloadSize;
- if (Result == 416)
+ if (Result == 416 || (Result >= 300 && Result < 400))
DownloadSizePtr = &JunkSize;
*DownloadSizePtr = strtoull(Val.c_str(), NULL, 10);
@@ -272,6 +272,7 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
RemoveFile("server", Queue->DestFile);
Res.IMSHit = true;
Res.LastModified = Queue->LastModified;
+ Res.Size = 0;
return IMS_HIT;
}
@@ -288,7 +289,8 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
&& Server->Result != 304 // Not Modified
&& Server->Result != 306)) // (Not part of HTTP/1.1, reserved)
{
- if (Server->Location.empty() == true);
+ if (Server->Location.empty() == true)
+ ;
else if (Server->Location[0] == '/' && Queue->Uri.empty() == false)
{
URI Uri = Queue->Uri;
@@ -329,8 +331,10 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
if (tmpURI.Access == Uri.Access)
return TRY_AGAIN_OR_REDIRECT;
// as well as http to https
- else if (Uri.Access == "http" && tmpURI.Access == "https")
+ else if ((Uri.Access == "http" || Uri.Access == "https+http") && tmpURI.Access == "https")
return TRY_AGAIN_OR_REDIRECT;
+ else
+ _error->Error("Redirection from %s to '%s' is forbidden", Uri.Access.c_str(), NextURI.c_str());
}
/* else pass through for error message */
}
@@ -358,11 +362,10 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
// the file is completely downloaded, but was not moved
if (Server->HaveContent == true)
{
- // Send to error page to dev/null
- FileFd DevNull("/dev/null",FileFd::WriteExists);
- Server->RunData(&DevNull);
+ // nuke the sent error page
+ Server->RunDataToDevNull();
+ Server->HaveContent = false;
}
- Server->HaveContent = false;
Server->StartPos = Server->TotalFileSize;
Server->Result = 200;
}
@@ -378,10 +381,13 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
failure */
if (Server->Result < 200 || Server->Result >= 300)
{
- std::string err;
- strprintf(err, "HttpError%u", Server->Result);
- SetFailReason(err);
- _error->Error("%u %s", Server->Result, Server->Code);
+ if (_error->PendingError() == false)
+ {
+ std::string err;
+ strprintf(err, "HttpError%u", Server->Result);
+ SetFailReason(err);
+ _error->Error("%u %s", Server->Result, Server->Code);
+ }
if (Server->HaveContent == true)
return ERROR_WITH_CONTENT_PAGE;
return ERROR_UNRECOVERABLE;
@@ -390,27 +396,6 @@ ServerMethod::DealWithHeaders(FetchResult &Res)
// This is some sort of 2xx 'data follows' reply
Res.LastModified = Server->Date;
Res.Size = Server->TotalFileSize;
-
- // Open the file
- delete File;
- File = new FileFd(Queue->DestFile,FileFd::WriteAny);
- if (_error->PendingError() == true)
- return ERROR_NOT_FROM_SERVER;
-
- FailFile = Queue->DestFile;
- FailFile.c_str(); // Make sure we don't do a malloc in the signal handler
- FailFd = File->Fd();
- FailTime = Server->Date;
-
- if (Server->InitHashes(Queue->ExpectedHashes) == false || Server->AddPartialFileToHashes(*File) == false)
- {
- _error->Errno("read",_("Problem hashing file"));
- return ERROR_NOT_FROM_SERVER;
- }
- if (Server->StartPos > 0)
- Res.ResumePoint = Server->StartPos;
-
- SetNonBlock(File->Fd(),true);
return FILE_IS_OPEN;
}
/*}}}*/
@@ -729,12 +714,7 @@ int ServerMethod::Loop()
case ERROR_WITH_CONTENT_PAGE:
{
Fail();
-
- // Send to content to dev/null
- File = new FileFd("/dev/null",FileFd::WriteExists);
- Server->RunData(File);
- delete File;
- File = 0;
+ Server->RunDataToDevNull();
break;
}
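
The new Server->RunDataToDevNull() helper used twice above is declared elsewhere in this commit; judging from the two inline snippets it replaces, it drains the pending response body into /dev/null. A sketch of what such a helper plausibly looks like, reconstructed from the removed code and assuming RunData(FileFd *) streams the remaining body into the given file:

   // Sketch only, reconstructed from the inline code removed above.
   bool ServerState::RunDataToDevNull()
   {
      // discard the (error page) body the server insists on sending
      FileFd DevNull("/dev/null", FileFd::WriteExists);
      return RunData(&DevNull);
   }
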
diff --git a/methods/server.h b/methods/server.h
index b23b0e50a..f2868c96a 100644
--- a/methods/server.h
+++ b/methods/server.h
@@ -141,7 +141,7 @@ class ServerMethod : public aptMethod
TRY_AGAIN_OR_REDIRECT
};
/** \brief Handle the retrieved header data */
- DealWithHeadersResult DealWithHeaders(FetchResult &Res);
+ virtual DealWithHeadersResult DealWithHeaders(FetchResult &Res);
// In the event of a fatal signal this file will be closed and timestamped.
static std::string FailFile;
diff --git a/test/integration/test-apt-https-no-redirect b/test/integration/test-apt-https-no-redirect
index b437ac1e6..69e2ba57f 100755
--- a/test/integration/test-apt-https-no-redirect
+++ b/test/integration/test-apt-https-no-redirect
@@ -11,7 +11,9 @@ insertpackage 'stable' 'apt' 'all' '1'
setupaptarchive --no-update
echo 'alright' > aptarchive/working
-changetohttpswebserver -o "aptwebserver::redirect::replace::/redirectme/=http://localhost:${APTHTTPPORT}/"
+changetohttpswebserver
+webserverconfig 'aptwebserver::redirect::replace::/redirectme/' "http://localhost:${APTHTTPPORT}/"
+webserverconfig 'aptwebserver::redirect::replace::/redirectme2/' "https://localhost:${APTHTTPSPORT}/"
msgtest 'download of a file works via' 'http'
testsuccess --nomsg downloadfile "http://localhost:${APTHTTPPORT}/working" httpfile
@@ -25,4 +27,4 @@ msgtest 'download of a file does not work if' 'https redirected to http'
testfailure --nomsg downloadfile "https://localhost:${APTHTTPSPORT}/redirectme/working" redirectfile
msgtest 'libcurl has forbidden access in last request to' 'http resource'
-testsuccess --nomsg grep -q -E -- 'Protocol "?http"? not supported or disabled in libcurl' rootdir/tmp/testfailure.output
+testsuccess --nomsg grep -q -E -- "Redirection from https to 'http://.*' is forbidden" rootdir/tmp/testfailure.output
diff --git a/test/integration/test-partial-file-support b/test/integration/test-partial-file-support
index e2d2743b3..1c5d120d8 100755
--- a/test/integration/test-partial-file-support
+++ b/test/integration/test-partial-file-support
@@ -146,3 +146,6 @@ serverconfigs "http://localhost:${APTHTTPPORT}"
changetohttpswebserver
serverconfigs "https://localhost:${APTHTTPSPORT}"
+
+webserverconfig 'aptwebserver::redirect::replace::/redirectme/' "https://localhost:${APTHTTPSPORT}/"
+serverconfigs "http://localhost:${APTHTTPPORT}/redirectme"