<?php
/**
 * @file src/Util/Network.php
 */
namespace Friendica\Util;

use DOMDocument;
use DomXPath;
use Friendica\Core\Config;
use Friendica\Core\Hook;
use Friendica\Core\Logger;
use Friendica\Core\System;
use Friendica\Network\CurlResult;

class Network
{
	/**
	 * Curl wrapper
	 *
	 * If binary flag is true, return binary results.
	 * Set the cookiejar argument to a string (e.g. "/tmp/friendica-cookies.txt")
	 * to preserve cookies from one request to the next.
	 *
	 * @brief Curl wrapper
	 * @param string  $url            URL to fetch
	 * @param boolean $binary         default false
	 *                                TRUE if asked to return binary results (file download)
	 * @param integer $redirects      The recursion counter for internal use - default 0
	 * @param integer $timeout        Timeout in seconds, default system config value or 60 seconds
	 * @param string  $accept_content supply Accept: header with 'accept_content' as the value
	 * @param string  $cookiejar      Path to cookie jar file
	 *
	 * @return string The fetched content
	 * @throws \Friendica\Network\HTTPException\InternalServerErrorException
	 */
	public static function fetchUrl($url, $binary = false, &$redirects = 0, $timeout = 0, $accept_content = null, $cookiejar = '')
	{
		$ret = self::fetchUrlFull($url, $binary, $redirects, $timeout, $accept_content, $cookiejar);

		return $ret->getBody();
	}
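
	/*
	 * Illustrative usage (not part of the original file): fetchUrl() returns just
	 * the response body, while fetchUrlFull() returns the full CurlResult object.
	 * The URL and cookie jar path below are placeholders.
	 *
	 *   $redirects = 0;
	 *   $body = Network::fetchUrl('https://example.com/feed.atom', false, $redirects, 10,
	 *       'application/atom+xml', '/tmp/friendica-cookies.txt');
	 */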

	/**
	 * Curl wrapper with array of return values.
	 *
	 * Inner workings and parameters are the same as @ref fetchUrl but returns a
	 * CurlResult with all the information collected during the fetch.
	 *
	 * @brief Curl wrapper with array of return values.
	 * @param string  $url            URL to fetch
	 * @param boolean $binary         default false
	 *                                TRUE if asked to return binary results (file download)
	 * @param integer $redirects      The recursion counter for internal use - default 0
	 * @param integer $timeout        Timeout in seconds, default system config value or 60 seconds
	 * @param string  $accept_content supply Accept: header with 'accept_content' as the value
	 * @param string  $cookiejar      Path to cookie jar file
	 *
	 * @return CurlResult With all relevant information, 'body' contains the actual fetched content.
	 * @throws \Friendica\Network\HTTPException\InternalServerErrorException
	 */
	public static function fetchUrlFull($url, $binary = false, &$redirects = 0, $timeout = 0, $accept_content = null, $cookiejar = '')
	{
		return self::curl(
			$url,
			$binary,
			$redirects,
			[
				'timeout'        => $timeout,
				'accept_content' => $accept_content,
				'cookiejar'      => $cookiejar
			]
		);
	}

	/**
	 * @brief Fetches a URL.
	 *
	 * @param string  $url       URL to fetch
	 * @param boolean $binary    default false
	 *                           TRUE if asked to return binary results (file download)
	 * @param int     $redirects The recursion counter for internal use - default 0
	 * @param array   $opts      (optional parameters) associative array with:
	 *                           'accept_content' => supply Accept: header with 'accept_content' as the value
	 *                           'timeout' => int Timeout in seconds, default system config value or 60 seconds
	 *                           'http_auth' => username:password
	 *                           'novalidate' => do not validate SSL certs, default is to validate using our CA list
	 *                           'nobody' => only return the header
	 *                           'cookiejar' => path to cookie jar file
	 *                           'header' => header array
	 *
	 * @return CurlResult
	 * @throws \Friendica\Network\HTTPException\InternalServerErrorException
	 */
	public static function curl($url, $binary = false, &$redirects = 0, $opts = [])
	{
		$stamp1 = microtime(true);

		$a = \get_app();

		if (strlen($url) > 1000) {
			Logger::log('URL is longer than 1000 characters. Callstack: ' . System::callstack(20), Logger::DEBUG);
			return CurlResult::createErrorCurl(substr($url, 0, 200));
		}

		$parts2 = [];
		$parts = parse_url($url);
		$path_parts = explode('/', defaults($parts, 'path', ''));
		foreach ($path_parts as $part) {
			if (strlen($part) <> mb_strlen($part)) {
				$parts2[] = rawurlencode($part);
			} else {
				$parts2[] = $part;
			}
		}
		$parts['path'] = implode('/', $parts2);
		$url = self::unparseURL($parts);

		if (self::isUrlBlocked($url)) {
			Logger::log('domain of ' . $url . ' is blocked', Logger::DATA);
			return CurlResult::createErrorCurl($url);
		}

		$ch = @curl_init($url);

		if (($redirects > 8) || (!$ch)) {
			return CurlResult::createErrorCurl($url);
		}

		@curl_setopt($ch, CURLOPT_HEADER, true);

		if (!empty($opts['cookiejar'])) {
			curl_setopt($ch, CURLOPT_COOKIEJAR, $opts["cookiejar"]);
			curl_setopt($ch, CURLOPT_COOKIEFILE, $opts["cookiejar"]);
		}

		// These settings aren't needed. We're following the location already.
		// @curl_setopt($ch, CURLOPT_FOLLOWLOCATION, true);
		// @curl_setopt($ch, CURLOPT_MAXREDIRS, 5);

		if (!empty($opts['accept_content'])) {
			curl_setopt(
				$ch,
				CURLOPT_HTTPHEADER,
				['Accept: ' . $opts['accept_content']]
			);
		}

		if (!empty($opts['header'])) {
			curl_setopt($ch, CURLOPT_HTTPHEADER, $opts['header']);
		}

		@curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
		@curl_setopt($ch, CURLOPT_USERAGENT, $a->getUserAgent());

		$range = intval(Config::get('system', 'curl_range_bytes', 0));

		if ($range > 0) {
			@curl_setopt($ch, CURLOPT_RANGE, '0-' . $range);
		}

		// Without this setting it seems as if some webservers send compressed content
		// This seems to confuse curl so that it shows this uncompressed.
		/// @todo We could possibly set this value to "gzip" or something similar
		curl_setopt($ch, CURLOPT_ENCODING, '');

		if (!empty($opts['headers'])) {
			@curl_setopt($ch, CURLOPT_HTTPHEADER, $opts['headers']);
		}

		if (!empty($opts['nobody'])) {
			@curl_setopt($ch, CURLOPT_NOBODY, $opts['nobody']);
		}

		if (!empty($opts['timeout'])) {
			@curl_setopt($ch, CURLOPT_TIMEOUT, $opts['timeout']);
		} else {
			$curl_time = Config::get('system', 'curl_timeout', 60);
			@curl_setopt($ch, CURLOPT_TIMEOUT, intval($curl_time));
		}

		// by default we will allow self-signed certs
		// but you can override this

		$check_cert = Config::get('system', 'verifyssl');
		@curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, (($check_cert) ? true : false));

		if ($check_cert) {
			@curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, 2);
		}

		$proxy = Config::get('system', 'proxy');

		if (strlen($proxy)) {
			@curl_setopt($ch, CURLOPT_HTTPPROXYTUNNEL, 1);
			@curl_setopt($ch, CURLOPT_PROXY, $proxy);
			$proxyuser = @Config::get('system', 'proxyuser');

			if (strlen($proxyuser)) {
				@curl_setopt($ch, CURLOPT_PROXYUSERPWD, $proxyuser);
			}
		}

		if (Config::get('system', 'ipv4_resolve', false)) {
			curl_setopt($ch, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V4);
		}

		if ($binary) {
			@curl_setopt($ch, CURLOPT_BINARYTRANSFER, 1);
		}

		// don't let curl abort the entire application
		// if it throws any errors.

		$s = @curl_exec($ch);
		$curl_info = @curl_getinfo($ch);

		// Special treatment for HTTP Code 416
		// See https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/416
		if (($curl_info['http_code'] == 416) && ($range > 0)) {
			@curl_setopt($ch, CURLOPT_RANGE, '');
			$s = @curl_exec($ch);
			$curl_info = @curl_getinfo($ch);
		}

		$curlResponse = new CurlResult($url, $s, $curl_info, curl_errno($ch), curl_error($ch));

		if ($curlResponse->isRedirectUrl()) {
			$redirects++;
			Logger::log('curl: redirect ' . $url . ' to ' . $curlResponse->getRedirectUrl());
			@curl_close($ch);
			return self::curl($curlResponse->getRedirectUrl(), $binary, $redirects, $opts);
		}

		@curl_close($ch);

		$a->getProfiler()->saveTimestamp($stamp1, 'network', System::callstack());

		return $curlResponse;
	}
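
	/*
	 * Illustrative usage (not part of the original file): the $opts array drives
	 * the transfer, e.g. a header-only request with a custom Accept header and a
	 * short timeout. The URL is a placeholder.
	 *
	 *   $redirects = 0;
	 *   $result = Network::curl('https://example.com/resource', false, $redirects, [
	 *       'timeout'        => 10,
	 *       'accept_content' => 'application/json',
	 *       'nobody'         => true,
	 *   ]);
	 *   $status = $result->getReturnCode();
	 */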

	/**
	 * @brief Send POST request to $url
	 *
	 * @param string  $url       URL to post
	 * @param mixed   $params    array of POST variables
	 * @param array   $headers   HTTP headers array (optional)
	 * @param integer $redirects Recursion counter for internal use - default = 0
	 * @param integer $timeout   The timeout in seconds, default system config value or 60 seconds
	 *
	 * @return CurlResult The content
	 * @throws \Friendica\Network\HTTPException\InternalServerErrorException
	 */
	public static function post($url, $params, $headers = null, &$redirects = 0, $timeout = 0)
	{
		$stamp1 = microtime(true);

		if (self::isUrlBlocked($url)) {
			Logger::log('post_url: domain of ' . $url . ' is blocked', Logger::DATA);
			return CurlResult::createErrorCurl($url);
		}

		$a = \get_app();
		$ch = curl_init($url);

		if (($redirects > 8) || (!$ch)) {
			return CurlResult::createErrorCurl($url);
		}

		Logger::log('post_url: start ' . $url, Logger::DATA);

		curl_setopt($ch, CURLOPT_HEADER, true);
		curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
		curl_setopt($ch, CURLOPT_POST, 1);
		curl_setopt($ch, CURLOPT_POSTFIELDS, $params);
		curl_setopt($ch, CURLOPT_USERAGENT, $a->getUserAgent());

		if (Config::get('system', 'ipv4_resolve', false)) {
			curl_setopt($ch, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V4);
		}

		if (intval($timeout)) {
			curl_setopt($ch, CURLOPT_TIMEOUT, $timeout);
		} else {
			$curl_time = Config::get('system', 'curl_timeout', 60);
			curl_setopt($ch, CURLOPT_TIMEOUT, intval($curl_time));
		}

		if (defined('LIGHTTPD')) {
			if (!is_array($headers)) {
				$headers = ['Expect:'];
			} else {
				if (!in_array('Expect:', $headers)) {
					array_push($headers, 'Expect:');
				}
			}
		}

		if ($headers) {
			curl_setopt($ch, CURLOPT_HTTPHEADER, $headers);
		}

		$check_cert = Config::get('system', 'verifyssl');
		curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, (($check_cert) ? true : false));

		if ($check_cert) {
			@curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, 2);
		}

		$proxy = Config::get('system', 'proxy');

		if (strlen($proxy)) {
			curl_setopt($ch, CURLOPT_HTTPPROXYTUNNEL, 1);
			curl_setopt($ch, CURLOPT_PROXY, $proxy);
			$proxyuser = Config::get('system', 'proxyuser');
			if (strlen($proxyuser)) {
				curl_setopt($ch, CURLOPT_PROXYUSERPWD, $proxyuser);
			}
		}

		// don't let curl abort the entire application
		// if it throws any errors.

		$s = @curl_exec($ch);

		$curl_info = curl_getinfo($ch);

		$curlResponse = new CurlResult($url, $s, $curl_info, curl_errno($ch), curl_error($ch));

		if ($curlResponse->isRedirectUrl()) {
			$redirects++;
			Logger::log('post_url: redirect ' . $url . ' to ' . $curlResponse->getRedirectUrl());
			curl_close($ch);
			return self::post($curlResponse->getRedirectUrl(), $params, $headers, $redirects, $timeout);
		}

		curl_close($ch);

		$a->getProfiler()->saveTimestamp($stamp1, 'network', System::callstack());

		Logger::log('post_url: end ' . $url, Logger::DATA);

		return $curlResponse;
	}
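
	/*
	 * Illustrative usage (not part of the original file): POST an array of form
	 * fields and inspect the response. The URL, fields and header are placeholders,
	 * and the CurlResult accessors are assumed from this codebase.
	 *
	 *   $redirects = 0;
	 *   $result = Network::post('https://example.com/endpoint', ['key' => 'value'],
	 *       ['Content-Type: application/x-www-form-urlencoded'], $redirects, 30);
	 *   if ($result->isSuccess()) {
	 *       $body = $result->getBody();
	 *   }
	 */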

	/**
	 * @brief Check URL to see if it's real
	 *
	 * Take a URL from the wild, prepend http:// if necessary
	 * and check DNS to see if it's real (or check if it is a valid IP address)
	 *
	 * @param string $url The URL to be validated
	 * @return string|boolean The actual working URL, false otherwise
	 * @throws \Friendica\Network\HTTPException\InternalServerErrorException
	 */
	public static function isUrlValid($url)
	{
		if (Config::get('system', 'disable_url_validation')) {
			return $url;
		}

		// no naked subdomains (allow localhost for tests)
		if (strpos($url, '.') === false && strpos($url, '/localhost/') === false) {
			return false;
		}

		if (substr($url, 0, 4) != 'http') {
			$url = 'http://' . $url;
		}

		/// @TODO Really suppress function outcomes? Why not find them + debug them?
		$h = @parse_url($url);

		if (!empty($h['host']) && (@dns_get_record($h['host'], DNS_A + DNS_CNAME) || filter_var($h['host'], FILTER_VALIDATE_IP))) {
			return $url;
		}

		return false;
	}
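
	/*
	 * Illustrative usage (not part of the original file): a bare hostname gets the
	 * http:// prefix prepended before the DNS check, so a resolvable host comes
	 * back as a full URL. Hostnames below are placeholders.
	 *
	 *   Network::isUrlValid('example.com'); // 'http://example.com' if DNS resolves
	 *   Network::isUrlValid('not a url');   // false (no dot, not localhost)
	 */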

	/**
	 * @brief Checks that email is an actual resolvable internet address
	 *
	 * @param string $addr The email address
	 * @return boolean True if it's a valid email address, false if it's not
	 * @throws \Friendica\Network\HTTPException\InternalServerErrorException
	 */
	public static function isEmailDomainValid($addr)
	{
		if (Config::get('system', 'disable_email_validation')) {
			return true;
		}

		if (!strpos($addr, '@')) {
			return false;
		}

		$h = substr($addr, strpos($addr, '@') + 1);

		// Concerning the @ see here: https://stackoverflow.com/questions/36280957/dns-get-record-a-temporary-server-error-occurred
		if ($h && (@dns_get_record($h, DNS_A + DNS_MX) || filter_var($h, FILTER_VALIDATE_IP))) {
			return true;
		}
		if ($h && @dns_get_record($h, DNS_CNAME + DNS_MX)) {
			return true;
		}
		return false;
	}
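
	/*
	 * Illustrative usage (not part of the original file): only the part after the
	 * "@" is checked, via A/MX (or CNAME/MX) DNS lookups. Addresses are placeholders.
	 *
	 *   Network::isEmailDomainValid('user@example.com'); // true if example.com has A or MX records
	 *   Network::isEmailDomainValid('no-at-sign');       // false
	 */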

	/**
	 * @brief Check if URL is allowed
	 *
	 * Check $url against our list of allowed sites,
	 * wildcards allowed. If allowed_sites is unset, return true.
	 *
	 * @param string $url URL which gets tested
	 * @return boolean True if url is allowed otherwise return false
	 * @throws \Friendica\Network\HTTPException\InternalServerErrorException
	 */
	public static function isUrlAllowed($url)
	{
		$h = @parse_url($url);

		if (!$h) {
			return false;
		}

		$str_allowed = Config::get('system', 'allowed_sites');
		if (!$str_allowed) {
			return true;
		}

		$found = false;

		$host = strtolower($h['host']);

		// always allow our own site
		if ($host == strtolower($_SERVER['SERVER_NAME'])) {
			return true;
		}

		$fnmatch = function_exists('fnmatch');
		$allowed = explode(',', $str_allowed);

		if (count($allowed)) {
			foreach ($allowed as $a) {
				$pat = strtolower(trim($a));
				if (($fnmatch && fnmatch($pat, $host)) || ($pat == $host)) {
					$found = true;
					break;
				}
			}
		}
		return $found;
	}
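
	/*
	 * Illustrative usage (not part of the original file): with a hypothetical
	 * system.allowed_sites value of "*.example.com,friendica.example.org",
	 * wildcard patterns are matched against the host via fnmatch().
	 *
	 *   Network::isUrlAllowed('https://social.example.com/profile'); // true (matches *.example.com)
	 *   Network::isUrlAllowed('https://other.tld/');                 // false
	 */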

	/**
	 * Checks if the provided url domain is on the domain blocklist.
	 * Returns true if it is, false if not or if the URL has no parsable host.
	 *
	 * @param string $url The url to check the domain from
	 *
	 * @return boolean
	 */
	public static function isUrlBlocked($url)
	{
		$host = @parse_url($url, PHP_URL_HOST);
		if (!$host) {
			return false;
		}

		$domain_blocklist = Config::get('system', 'blocklist', []);
		if (!$domain_blocklist) {
			return false;
		}

		foreach ($domain_blocklist as $domain_block) {
			if (strcasecmp($domain_block['domain'], $host) === 0) {
				return true;
			}
		}

		return false;
	}

	/**
	 * @brief Check if email address is allowed to register here.
	 *
	 * Compare against our list (wildcards allowed).
	 *
	 * @param string $email email address
	 * @return boolean False if not allowed, true if allowed
	 *                 or if allowed list is not configured
	 * @throws \Friendica\Network\HTTPException\InternalServerErrorException
	 */
	public static function isEmailDomainAllowed($email)
	{
		$domain = strtolower(substr($email, strpos($email, '@') + 1));
		if (!$domain) {
			return false;
		}

		$str_allowed = Config::get('system', 'allowed_email', '');
		if (empty($str_allowed)) {
			return true;
		}

		$allowed = explode(',', $str_allowed);

		return self::isDomainAllowed($domain, $allowed);
	}

	/**
	 * Checks for the existence of a domain in a domain list
	 *
	 * @brief Checks for the existence of a domain in a domain list
	 * @param string $domain      Domain to check
	 * @param array  $domain_list List of domain patterns (wildcards allowed)
	 * @return boolean
	 */
	public static function isDomainAllowed($domain, array $domain_list)
	{
		$found = false;

		foreach ($domain_list as $item) {
			$pat = strtolower(trim($item));
			if (fnmatch($pat, $domain) || ($pat == $domain)) {
				$found = true;
				break;
			}
		}

		return $found;
	}
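
	/*
	 * Illustrative usage (not part of the original file): patterns are matched
	 * with fnmatch(), so shell-style wildcards work. Domains are placeholders.
	 *
	 *   Network::isDomainAllowed('mail.example.com', ['*.example.com', 'example.org']); // true
	 *   Network::isDomainAllowed('example.net', ['*.example.com', 'example.org']);      // false
	 */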

	/**
	 * Looks up the avatar URL for an email address via the avatar_lookup hook.
	 *
	 * @param string $email The email address to look up
	 * @return string The avatar URL, or the default person image if no hook answered
	 */
	public static function lookupAvatarByEmail($email)
	{
		$avatar['size'] = 300;
		$avatar['email'] = $email;
		$avatar['url'] = '';
		$avatar['success'] = false;

		Hook::callAll('avatar_lookup', $avatar);

		if (!$avatar['success']) {
			$avatar['url'] = System::baseUrl() . '/images/person-300.jpg';
		}

		Logger::log('Avatar: ' . $avatar['email'] . ' ' . $avatar['url'], Logger::DEBUG);
		return $avatar['url'];
	}

	/**
	 * @brief Remove Google Analytics and other tracking platforms params from URL
	 *
	 * @param string $url Any user-submitted URL that may contain tracking params
	 * @return string The same URL stripped of tracking parameters
	 */
	public static function stripTrackingQueryParams($url)
	{
		$urldata = parse_url($url);
		if (!empty($urldata["query"])) {
			$query = $urldata["query"];
			parse_str($query, $querydata);

			if (is_array($querydata)) {
				foreach ($querydata as $param => $value) {
					if (in_array(
						$param,
						[
							"utm_source", "utm_medium", "utm_term", "utm_content", "utm_campaign",
							"wt_mc", "pk_campaign", "pk_kwd", "mc_cid", "mc_eid",
							"fb_action_ids", "fb_action_types", "fb_ref",
							"awesm", "wtrid",
							"woo_campaign", "woo_source", "woo_medium", "woo_content", "woo_term"
						]
					)
					) {
						$pair = $param . "=" . urlencode($value);
						$url = str_replace($pair, "", $url);

						// Second try: if the url isn't encoded completely
						$pair = $param . "=" . str_replace(" ", "+", $value);
						$url = str_replace($pair, "", $url);

						// Third try: Maybe the url isn't encoded at all
						$pair = $param . "=" . $value;
						$url = str_replace($pair, "", $url);

						$url = str_replace(["?&", "&&"], ["?", ""], $url);
					}
				}
			}

			if (substr($url, -1, 1) == "?") {
				$url = substr($url, 0, -1);
			}
		}

		return $url;
	}
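
	/*
	 * Illustrative usage (not part of the original file): utm_* and similar
	 * campaign parameters are removed while other query parameters are kept.
	 * The URL is a placeholder.
	 *
	 *   Network::stripTrackingQueryParams('https://example.com/page?utm_source=newsletter&id=1');
	 *   // 'https://example.com/page?id=1'
	 */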

	/**
	 * @brief Returns the canonical URL of the provided URL
	 *
	 * This function strips tracking query params and follows redirections, either
	 * through HTTP code or meta refresh tags. Stops after 10 redirections.
	 *
	 * @todo Remove the $fetchbody parameter that generates an extraneous HEAD request
	 *
	 * @see ParseUrl::getSiteinfo
	 *
	 * @param string $url       A user-submitted URL
	 * @param int    $depth     The current redirection recursion level (internal)
	 * @param bool   $fetchbody Whether to fetch the body or not after the HEAD requests
	 * @return string A canonical URL
	 * @throws \Friendica\Network\HTTPException\InternalServerErrorException
	 */
	public static function finalUrl($url, $depth = 1, $fetchbody = false)
	{
		$a = \get_app();

		$url = self::stripTrackingQueryParams($url);

		if ($depth > 10) {
			return $url;
		}

		$url = trim($url, "'");

		$stamp1 = microtime(true);

		$ch = curl_init();
		curl_setopt($ch, CURLOPT_URL, $url);
		curl_setopt($ch, CURLOPT_HEADER, 1);
		curl_setopt($ch, CURLOPT_NOBODY, 1);
		curl_setopt($ch, CURLOPT_TIMEOUT, 10);
		curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
		curl_setopt($ch, CURLOPT_USERAGENT, $a->getUserAgent());

		curl_exec($ch);
		$curl_info = @curl_getinfo($ch);
		$http_code = $curl_info['http_code'];
		curl_close($ch);

		$a->getProfiler()->saveTimestamp($stamp1, "network", System::callstack());

		if ($http_code == 0) {
			return $url;
		}

		if (in_array($http_code, ['301', '302'])) {
			if (!empty($curl_info['redirect_url'])) {
				return self::finalUrl($curl_info['redirect_url'], ++$depth, $fetchbody);
			} elseif (!empty($curl_info['location'])) {
				return self::finalUrl($curl_info['location'], ++$depth, $fetchbody);
			}
		}

		// Check for redirects in the meta elements of the body if there are no redirects in the header.
		if (!$fetchbody) {
			return self::finalUrl($url, ++$depth, true);
		}

		// if the file is too large then exit
		if ($curl_info["download_content_length"] > 1000000) {
			return $url;
		}

		// if it isn't a HTML file then exit
		if (!empty($curl_info["content_type"]) && !strstr(strtolower($curl_info["content_type"]), "html")) {
			return $url;
		}

		$stamp1 = microtime(true);

		$ch = curl_init();
		curl_setopt($ch, CURLOPT_URL, $url);
		curl_setopt($ch, CURLOPT_HEADER, 0);
		curl_setopt($ch, CURLOPT_NOBODY, 0);
		curl_setopt($ch, CURLOPT_TIMEOUT, 10);
		curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
		curl_setopt($ch, CURLOPT_USERAGENT, $a->getUserAgent());

		$body = curl_exec($ch);
		curl_close($ch);

		$a->getProfiler()->saveTimestamp($stamp1, "network", System::callstack());

		if (trim($body) == "") {
			return $url;
		}

		// Check for redirect in meta elements
		$doc = new DOMDocument();
		@$doc->loadHTML($body);

		$xpath = new DomXPath($doc);

		$list = $xpath->query("//meta[@content]");
		foreach ($list as $node) {
			$attr = [];
			if ($node->attributes->length) {
				foreach ($node->attributes as $attribute) {
					$attr[$attribute->name] = $attribute->value;
				}
			}

			if (@$attr["http-equiv"] == 'refresh') {
				$path = $attr["content"];
				$pathinfo = explode(";", $path);
				foreach ($pathinfo as $value) {
					if (substr(strtolower($value), 0, 4) == "url=") {
						return self::finalUrl(substr($value, 4), ++$depth);
					}
				}
			}
		}

		return $url;
	}
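
	/*
	 * Illustrative usage (not part of the original file): tracking parameters are
	 * stripped first, then HTTP 301/302 redirects and <meta http-equiv="refresh">
	 * tags are followed, up to 10 hops. The URL is a placeholder.
	 *
	 *   $canonical = Network::finalUrl('http://example.com/short?utm_source=share');
	 */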

	/**
	 * @brief Find the matching part between two URLs
	 *
	 * @param string $url1
	 * @param string $url2
	 * @return string The matching part
	 */
	public static function getUrlMatch($url1, $url2)
	{
		if (($url1 == "") || ($url2 == "")) {
			return "";
		}

		$url1 = Strings::normaliseLink($url1);
		$url2 = Strings::normaliseLink($url2);

		$parts1 = parse_url($url1);
		$parts2 = parse_url($url2);

		if (!isset($parts1["host"]) || !isset($parts2["host"])) {
			return "";
		}

		if (empty($parts1["scheme"])) {
			$parts1["scheme"] = '';
		}
		if (empty($parts2["scheme"])) {
			$parts2["scheme"] = '';
		}

		if ($parts1["scheme"] != $parts2["scheme"]) {
			return "";
		}

		if (empty($parts1["host"])) {
			$parts1["host"] = '';
		}
		if (empty($parts2["host"])) {
			$parts2["host"] = '';
		}

		if ($parts1["host"] != $parts2["host"]) {
			return "";
		}

		if (empty($parts1["port"])) {
			$parts1["port"] = '';
		}
		if (empty($parts2["port"])) {
			$parts2["port"] = '';
		}

		if ($parts1["port"] != $parts2["port"]) {
			return "";
		}

		$match = $parts1["scheme"] . "://" . $parts1["host"];

		if ($parts1["port"]) {
			$match .= ":" . $parts1["port"];
		}

		if (empty($parts1["path"])) {
			$parts1["path"] = '';
		}
		if (empty($parts2["path"])) {
			$parts2["path"] = '';
		}

		$pathparts1 = explode("/", $parts1["path"]);
		$pathparts2 = explode("/", $parts2["path"]);

		$i = 0;
		$path = "";
		do {
			$path1 = defaults($pathparts1, $i, '');
			$path2 = defaults($pathparts2, $i, '');

			if ($path1 == $path2) {
				$path .= $path1 . "/";
			}
		} while (($path1 == $path2) && ($i++ <= count($pathparts1)));

		$match .= $path;

		return Strings::normaliseLink($match);
	}
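
	/*
	 * Illustrative usage (not part of the original file): both URLs are normalised
	 * first, then scheme, host, port and the common leading path segments are kept.
	 * URLs are placeholders.
	 *
	 *   Network::getUrlMatch('https://example.com/photos/2019/a', 'https://example.com/photos/2018/b');
	 *   // roughly 'http://example.com/photos/' after normalisation
	 */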

	/**
	 * @brief Glue url parts together
	 *
	 * @param array $parsed URL parts
	 *
	 * @return string The glued URL
	 */
	public static function unparseURL($parsed)
	{
		$get = function ($key) use ($parsed) {
			return isset($parsed[$key]) ? $parsed[$key] : null;
		};

		$pass = $get('pass');
		$user = $get('user');
		$userinfo = $pass !== null ? "$user:$pass" : $user;
		$port = $get('port');
		$scheme = $get('scheme');
		$query = $get('query');
		$fragment = $get('fragment');
		$authority = ($userinfo !== null ? $userinfo . "@" : '') .
			$get('host') .
			($port ? ":$port" : '');

		return (strlen($scheme) ? $scheme . ":" : '') .
			(strlen($authority) ? "//" . $authority : '') .
			$get('path') .
			(strlen($query) ? "?" . $query : '') .
			(strlen($fragment) ? "#" . $fragment : '');
	}
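
	/*
	 * Illustrative usage (not part of the original file): unparseURL() is the
	 * inverse of parse_url() for the components it handles. The URL is a placeholder.
	 *
	 *   $parts = parse_url('https://user@example.com:8080/path?x=1#top');
	 *   Network::unparseURL($parts); // 'https://user@example.com:8080/path?x=1#top'
	 */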

	/**
	 * Switch the scheme of a URL between http and https
	 *
	 * @param string $url URL
	 *
	 * @return string switched URL
	 */
	public static function switchScheme($url)
	{
		$scheme = parse_url($url, PHP_URL_SCHEME);
		if (empty($scheme)) {
			return $url;
		}

		if ($scheme === 'http') {
			$url = str_replace('http://', 'https://', $url);
		} elseif ($scheme === 'https') {
			$url = str_replace('https://', 'http://', $url);
		}

		return $url;
	}
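
	/*
	 * Illustrative usage (not part of the original file); URLs are placeholders:
	 *
	 *   Network::switchScheme('http://example.com/');  // 'https://example.com/'
	 *   Network::switchScheme('https://example.com/'); // 'http://example.com/'
	 *   Network::switchScheme('/relative/path');       // unchanged, no scheme
	 */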
}