Merge pull request #1926 from annando/1510-permit-crawler

New option to permit crawlers
Tobias Diekershoff, 2015-10-04 08:53:32 +02:00 (commit 05b2836d66)
3 changed files with 44 additions and 7 deletions

@@ -309,16 +309,25 @@ function xml_status($st, $message = '') {
 if(! function_exists('http_status_exit')) {
-function http_status_exit($val) {
+function http_status_exit($val, $description = array()) {
 	$err = '';
-	if($val >= 400)
+	if($val >= 400) {
 		$err = 'Error';
+		if (!isset($description["title"]))
+			$description["title"] = $err." ".$val;
+	}
 	if($val >= 200 && $val < 300)
 		$err = 'OK';
 	logger('http_status_exit ' . $val);
 	header($_SERVER["SERVER_PROTOCOL"] . ' ' . $val . ' ' . $err);
+
+	if (isset($description["title"])) {
+		$tpl = get_markup_template('http_status.tpl');
+		echo replace_macros($tpl, array('$title' => $description["title"],
+				'$description' => $description["description"]));
+	}
+
 	killme();
 }}
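Note on the new signature: when $description carries a "title" (and optionally a "description"), http_status_exit() now emits a small HTML body via http_status.tpl instead of just the bare status line. A minimal usage sketch, assuming the Friendica bootstrap is loaded so t(), get_markup_template() and killme() are available; the 404 wording is illustrative, not from the commit:

	// For codes >= 400 the "title" key may be omitted; it then
	// defaults to "Error <code>", e.g. "Error 404".
	http_status_exit(404, array(
		"description" => t("The requested resource could not be found.")
	));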

@@ -95,10 +95,29 @@ function search_content(&$a) {
 	}
 	if(get_config('system','local_search') AND !local_user()) {
-		notice(t('Public access denied.').EOL);
-		return;
-		//http_status_exit(403);
-		//killme();
+		http_status_exit(403,
+				array("title" => t("Public access denied."),
+					"description" => t("Only logged in users are permitted to perform a search.")));
+		killme();
+		//notice(t('Public access denied.').EOL);
+		//return;
 	}
+
+	if (get_config('system','permit_crawling') AND !local_user()) {
+		// To-Do:
+		// - 10 requests are "free", after the 11th only a call per minute is allowed
+		$remote = $_SERVER["REMOTE_ADDR"];
+		$result = Cache::get("remote_search:".$remote);
+		if (!is_null($result)) {
+			if ($result > (time() - 60)) {
+				http_status_exit(429,
+						array("title" => t("Too Many Requests"),
+							"description" => t("Only one search per minute is permitted for not logged in users.")));
+				killme();
+			}
+		}
+		Cache::set("remote_search:".$remote, time(), CACHE_HOUR);
+	}
+
 	nav_set_selected('search');
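The To-Do above sketches a friendlier policy: ten requests free, then one per minute. A rough illustration of how that could sit on the same Cache API, assuming the cached value is switched from a bare timestamp to a serialized array; this is a sketch, not part of the commit:

	// Hypothetical variant: count requests per remote address.
	$remote = $_SERVER["REMOTE_ADDR"];
	$entry = Cache::get("remote_search:".$remote);
	$entry = is_null($entry) ? array("last" => 0, "count" => 0) : unserialize($entry);
	$entry["count"]++;
	// The first 10 requests pass; from the 11th on, enforce a 60 second gap.
	// http_status_exit() calls killme(), so a rejected request stops here.
	if (($entry["count"] > 10) AND ($entry["last"] > (time() - 60)))
		http_status_exit(429,
				array("title" => t("Too Many Requests"),
					"description" => t("Only one search per minute is permitted for not logged in users.")));
	$entry["last"] = time();
	Cache::set("remote_search:".$remote, serialize($entry), CACHE_HOUR);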

@@ -0,0 +1,9 @@
+<html>
+<head>
+	<title>{{$title}}</title>
+</head>
+<body>
+	<h1>{{$title}}</h1>
+	<p>{{$description}}</p>
+</body>
+</html>
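For reference, the 429 case above would render through this template roughly as follows, assuming replace_macros() substitutes {{$title}} and {{$description}} verbatim:

	<html>
	<head>
		<title>Too Many Requests</title>
	</head>
	<body>
		<h1>Too Many Requests</h1>
		<p>Only one search per minute is permitted for not logged in users.</p>
	</body>
	</html>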