| author    | Michael Vogt <michael.vogt@ubuntu.com> | 2006-12-19 12:03:30 +0100 |
| committer | Michael Vogt <michael.vogt@ubuntu.com> | 2006-12-19 12:03:30 +0100 |
| commit    | d546f98d46c6a1d813976825f615e39f17b7ebf5 (patch) |
| tree      | 48dad9b7db785798586c716233c939ded8518599 /methods/https.cc |
| parent    | 4172c78468a04078c97146dbc2e2979e63906f5b (diff) |
| download  | apt-d546f98d46c6a1d813976825f615e39f17b7ebf5.tar.gz |
* added https transport method as optional package
Diffstat (limited to 'methods/https.cc')
| -rw-r--r-- | methods/https.cc | 226 |
1 file changed, 226 insertions(+), 0 deletions(-)
diff --git a/methods/https.cc b/methods/https.cc
new file mode 100644
index 000000000..06b7dff48
--- /dev/null
+++ b/methods/https.cc
@@ -0,0 +1,226 @@
+// -*- mode: cpp; mode: fold -*-
+// Description							/*{{{*/
+// $Id: http.cc,v 1.59 2004/05/08 19:42:35 mdz Exp $
+/* ######################################################################
+
+   HTTPS Aquire Method - This is the HTTPS aquire method for APT.
+
+   It uses libcurl
+
+   ##################################################################### */
+								/*}}}*/
+// Include Files						/*{{{*/
+#include <apt-pkg/fileutl.h>
+#include <apt-pkg/acquire-method.h>
+#include <apt-pkg/error.h>
+#include <apt-pkg/hashes.h>
+
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <utime.h>
+#include <unistd.h>
+#include <signal.h>
+#include <stdio.h>
+#include <errno.h>
+#include <string.h>
+#include <iostream>
+#include <apti18n.h>
+#include <sstream>
+
+#include "config.h"
+#include "https.h"
+
+								/*}}}*/
+using namespace std;
+
+size_t
+HttpsMethod::write_data(void *buffer, size_t size, size_t nmemb, void *userp)
+{
+   HttpsMethod *me = (HttpsMethod *)userp;
+
+   if(me->File->Write(buffer, size*nmemb) != true)
+      return false;
+
+   return size*nmemb;
+}
+
+int
+HttpsMethod::progress_callback(void *clientp, double dltotal, double dlnow,
+                               double ultotal, double ulnow)
+{
+   HttpsMethod *me = (HttpsMethod *)clientp;
+   if(dltotal > 0 && me->Res.Size == 0) {
+      me->Res.Size = dltotal;
+      me->URIStart(me->Res);
+   }
+   return 0;
+}
+
+bool HttpsMethod::SetupProxy()
+{
+   URI ServerName = Queue->Uri;
+
+   // Determine the proxy setting
+   if (getenv("http_proxy") == 0)
+   {
+      string DefProxy = _config->Find("Acquire::http::Proxy");
+      string SpecificProxy = _config->Find("Acquire::http::Proxy::" + ServerName.Host);
+      if (SpecificProxy.empty() == false)
+      {
+         if (SpecificProxy == "DIRECT")
+            Proxy = "";
+         else
+            Proxy = SpecificProxy;
+      }
+      else
+         Proxy = DefProxy;
+   }
+
+   // Parse no_proxy, a , separated list of domains
+   if (getenv("no_proxy") != 0)
+   {
+      if (CheckDomainList(ServerName.Host,getenv("no_proxy")) == true)
+         Proxy = "";
+   }
+
+   // Determine what host and port to use based on the proxy settings
+   int Port = 0;
+   string Host;
+   if (Proxy.empty() == true || Proxy.Host.empty() == true)
+   {
+   }
+   else
+   {
+      if (Proxy.Port != 0)
+         curl_easy_setopt(curl, CURLOPT_PROXYPORT, Proxy.Port);
+      curl_easy_setopt(curl, CURLOPT_PROXY, Proxy.Host.c_str());
+   }
+}
+
+
+// HttpsMethod::Fetch - Fetch an item					/*{{{*/
+// ---------------------------------------------------------------------
+/* This adds an item to the pipeline. We keep the pipeline at a fixed
+   depth. */
+bool HttpsMethod::Fetch(FetchItem *Itm)
+{
+   stringstream ss;
+   struct stat SBuf;
+   struct curl_slist *headers=NULL;
+
+   // TODO:
+   //       - http::Timeout
+   //       - http::Pipeline-Depth
+   //       - error checking/reporting
+   //       - more debug options? (CURLOPT_DEBUGFUNCTION?)
+
+   SetupProxy();
+
+   // callbacks
+   curl_easy_setopt(curl, CURLOPT_URL, Itm->Uri.c_str());
+   curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data);
+   curl_easy_setopt(curl, CURLOPT_WRITEDATA, this);
+   curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progress_callback);
+   curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, this);
+   curl_easy_setopt(curl, CURLOPT_NOPROGRESS, false);
+   curl_easy_setopt(curl, CURLOPT_FAILONERROR, true);
+
+   // FIXME: https: offer various options of verification
+   curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, false);
+
+   // cache-control
+   if(_config->FindB("Acquire::http::No-Cache",false) == false)
+   {
+      // cache enabled
+      if (_config->FindB("Acquire::http::No-Store",false) == true)
+         headers = curl_slist_append(headers,"Cache-Control: no-store");
+      ioprintf(ss, "Cache-Control: max-age=%u", _config->FindI("Acquire::http::Max-Age",0));
+      headers = curl_slist_append(headers, ss.str().c_str());
+   } else {
+      // cache disabled by user
+      headers = curl_slist_append(headers, "Cache-Control: no-cache");
+      headers = curl_slist_append(headers, "Pragma: no-cache");
+   }
+   curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers);
+
+   // set time values
+   curl_easy_setopt(curl, CURLOPT_TIMECONDITION, CURL_TIMECOND_IFMODSINCE);
+   curl_easy_setopt(curl, CURLOPT_TIMEVALUE, Itm->LastModified);
+
+   // speed limit
+   int dlLimit = _config->FindI("Acquire::http::Dl-Limit",0)*1024;
+   if (dlLimit > 0)
+      curl_easy_setopt(curl, CURLOPT_MAX_RECV_SPEED_LARGE, dlLimit);
+
+   // set header
+   curl_easy_setopt(curl, CURLOPT_USERAGENT,"Debian APT-CURL/1.0 ("VERSION")");
+
+   // debug
+   if(_config->FindB("Debug::Acquire::http", false))
+      curl_easy_setopt(curl, CURLOPT_VERBOSE, true);
+
+   // In this case we send an if-range query with a range header
+   if (stat(Itm->DestFile.c_str(),&SBuf) >= 0 && SBuf.st_size > 0)
+      curl_easy_setopt(curl, CURLOPT_RESUME_FROM, (long)SBuf.st_size);
+
+   // go for it - if the file exists, append on it
+   File = new FileFd(Itm->DestFile, FileFd::WriteAny);
+   File->Seek(File->Size());
+
+   // keep apt updated
+   Res.Filename = Itm->DestFile;
+
+   // get it!
+   CURLcode success = curl_easy_perform(curl);
+
+
+   // cleanup
+   if(success != 0) {
+      Fail();
+      return true;
+   }
+
+   if (Res.Size == 0)
+      Res.Size = File->Size();
+
+   // check the downloaded result
+   struct stat Buf;
+   if (stat(File->Name().c_str(),&Buf) == 0)
+   {
+      Res.Size = Buf.st_size;
+      Res.Filename = File->Name();
+      Res.LastModified = Buf.st_mtime;
+      Res.IMSHit = false;
+      if (Itm->LastModified == Buf.st_mtime && Itm->LastModified != 0)
+         Res.IMSHit = true;
+   }
+
+   // take hashes
+   Hashes Hash;
+   FileFd Fd(Res.Filename, FileFd::ReadOnly);
+   Hash.AddFD(Fd.Fd(), Fd.Size());
+   Res.TakeHashes(Hash);
+
+   // keep apt updated
+   URIDone(Res);
+
+   // cleanup
+   File->Close();
+   Res.Size = 0;
+   delete File;
+   curl_slist_free_all(headers);
+
+   return true;
+};
+
+int main()
+{
+   setlocale(LC_ALL, "");
+
+   HttpsMethod Mth;
+   curl_global_init(CURL_GLOBAL_SSL);
+
+   return Mth.Run();
+}
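
For reference, the sketch below shows the same libcurl pattern the new method relies on, in a standalone form: a write callback that streams the response body to a file, plus resume (CURLOPT_RESUME_FROM_LARGE) and If-Modified-Since (CURLOPT_TIMECONDITION/CURLOPT_TIMEVALUE) handling when a local copy already exists. This is not APT code; the FetchUrl/WriteToFile helpers, the URL, and the destination file are made up for illustration, and it only assumes a libcurl development install and linking with -lcurl.

// Minimal standalone sketch of the libcurl download pattern used above (not APT code).
// Build (assumption): g++ fetch_sketch.cc -lcurl -o fetch_sketch
#include <curl/curl.h>

#include <sys/stat.h>
#include <cstdio>
#include <string>

// Write callback: libcurl hands us a buffer, we append it to the open file.
// Returning a count different from size*nmemb makes libcurl abort the transfer.
static size_t WriteToFile(char *buffer, size_t size, size_t nmemb, void *userp)
{
   FILE *out = static_cast<FILE *>(userp);
   return fwrite(buffer, size, nmemb, out);
}

// Fetch `url` into `dest`, resuming a partial download and sending
// If-Modified-Since when a previous copy already exists on disk.
static bool FetchUrl(const std::string &url, const std::string &dest)
{
   struct stat SBuf;
   bool partial = (stat(dest.c_str(), &SBuf) == 0 && SBuf.st_size > 0);

   FILE *out = fopen(dest.c_str(), partial ? "ab" : "wb");
   if (out == NULL)
      return false;

   CURL *curl = curl_easy_init();
   if (curl == NULL) { fclose(out); return false; }

   curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
   curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, WriteToFile);
   curl_easy_setopt(curl, CURLOPT_WRITEDATA, out);
   curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1L);   // treat HTTP >= 400 as an error
   curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);

   if (partial)
   {
      // Resume where the previous attempt stopped and only fetch if newer.
      curl_easy_setopt(curl, CURLOPT_RESUME_FROM_LARGE, (curl_off_t)SBuf.st_size);
      curl_easy_setopt(curl, CURLOPT_TIMECONDITION, (long)CURL_TIMECOND_IFMODSINCE);
      curl_easy_setopt(curl, CURLOPT_TIMEVALUE, (long)SBuf.st_mtime);
   }

   CURLcode res = curl_easy_perform(curl);
   if (res != CURLE_OK)
      fprintf(stderr, "download failed: %s\n", curl_easy_strerror(res));

   curl_easy_cleanup(curl);
   fclose(out);
   return res == CURLE_OK;
}

int main()
{
   curl_global_init(CURL_GLOBAL_DEFAULT);
   // Hypothetical URL and destination, for illustration only.
   bool ok = FetchUrl("https://example.org/dists/stable/Release", "Release.partial");
   curl_global_cleanup();
   return ok ? 0 : 1;
}

The design choice mirrors the commit: libcurl drives the transfer and the caller only supplies callbacks and local file bookkeeping, which is what keeps the HTTPS method small compared to APT's hand-written HTTP client.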