Bugfix: Range checking with ParseUrl caused problems - we now use the standard z_fetch_url function

This commit is contained in:
Michael 2017-10-15 19:29:58 +00:00
parent 1441abd15a
commit 1506886947
2 changed files with 9 additions and 45 deletions

View file

@ -65,7 +65,7 @@ function fetch_url($url,$binary = false, &$redirects = 0, $timeout = 0, $accept_
* string 'body' => fetched content * string 'body' => fetched content
*/ */
function z_fetch_url($url, $binary = false, &$redirects = 0, $opts = array()) { function z_fetch_url($url, $binary = false, &$redirects = 0, $opts = array()) {
$ret = array('return_code' => 0, 'success' => false, 'header' => '', 'body' => ''); $ret = array('return_code' => 0, 'success' => false, 'header' => '', 'info' => '', 'body' => '');
$stamp1 = microtime(true); $stamp1 = microtime(true);
@ -173,6 +173,7 @@ function z_fetch_url($url, $binary = false, &$redirects = 0, $opts = array()) {
$base = $s; $base = $s;
$curl_info = @curl_getinfo($ch); $curl_info = @curl_getinfo($ch);
$ret['info'] = $curl_info;
$http_code = $curl_info['http_code']; $http_code = $curl_info['http_code'];
logger('fetch_url ' . $url . ': ' . $http_code . " " . $s, LOGGER_DATA); logger('fetch_url ' . $url . ': ' . $http_code . " " . $s, LOGGER_DATA);

View file

@ -139,53 +139,24 @@ class ParseUrl {
$siteinfo["url"] = $url; $siteinfo["url"] = $url;
$siteinfo["type"] = "link"; $siteinfo["type"] = "link";
$check_cert = Config::get("system", "verifyssl"); $data = z_fetch_url($url);
if (!$data['success']) {
$stamp1 = microtime(true);
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_HEADER, 1);
curl_setopt($ch, CURLOPT_TIMEOUT, 10);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_USERAGENT, $a->get_useragent());
curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, (($check_cert) ? true : false));
if ($check_cert) {
@curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, 2);
}
$range = intval(Config::get('system', 'curl_range_bytes', 0));
if ($range > 0) {
curl_setopt($ch, CURLOPT_RANGE, '0-' . $range);
}
$header = curl_exec($ch);
$curl_info = @curl_getinfo($ch);
curl_close($ch);
$a->save_timestamp($stamp1, "network");
if ((($curl_info["http_code"] == "301") || ($curl_info["http_code"] == "302") || ($curl_info["http_code"] == "303") || ($curl_info["http_code"] == "307"))
&& (($curl_info["redirect_url"] != "") || ($curl_info["location"] != ""))) {
if ($curl_info["redirect_url"] != "") {
$siteinfo = self::getSiteinfo($curl_info["redirect_url"], $no_guessing, $do_oembed, ++$count);
} else {
$siteinfo = self::getSiteinfo($curl_info["location"], $no_guessing, $do_oembed, ++$count);
}
return($siteinfo); return($siteinfo);
} }
// If the file is too large then exit // If the file is too large then exit
if ($curl_info["download_content_length"] > 1000000) { if ($data["info"]["download_content_length"] > 1000000) {
return($siteinfo); return($siteinfo);
} }
// If it isn't a HTML file then exit // If it isn't a HTML file then exit
if (($curl_info["content_type"] != "") && !strstr(strtolower($curl_info["content_type"]), "html")) { if (($data["info"]["content_type"] != "") && !strstr(strtolower($data["info"]["content_type"]), "html")) {
return($siteinfo); return($siteinfo);
} }
$header = $data["header"];
$body = $data["body"];
if ($do_oembed) { if ($do_oembed) {
$oembed_data = oembed_fetch_url($url); $oembed_data = oembed_fetch_url($url);
@ -217,14 +188,6 @@ class ParseUrl {
$charset = "utf-8"; $charset = "utf-8";
} }
$pos = strpos($header, "\r\n\r\n");
if ($pos) {
$body = trim(substr($header, $pos));
} else {
$body = $header;
}
if (($charset != "") && (strtoupper($charset) != "UTF-8")) { if (($charset != "") && (strtoupper($charset) != "UTF-8")) {
logger("parseurl_getsiteinfo: detected charset ".$charset, LOGGER_DEBUG); logger("parseurl_getsiteinfo: detected charset ".$charset, LOGGER_DEBUG);
//$body = mb_convert_encoding($body, "UTF-8", $charset); //$body = mb_convert_encoding($body, "UTF-8", $charset);