New option to permit crawlers

commit 2840bf2293
parent a102fd83e8
Author: Michael Vogel
Date:   2015-10-03 23:16:40 +02:00

2 changed files with 35 additions and 7 deletions
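
The new system.permit_crawling option lets a node admin permit rate-limited
anonymous searches instead of rejecting crawlers outright. A sketch of enabling
it, assuming the .htconfig.php convention of that era (hypothetical; the commit
itself only reads the value through get_config()):

	// .htconfig.php (sketch): allow crawlers, subject to the per-IP
	// rate limit added in the search module below.
	$a->config['system']['permit_crawling'] = true;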

@@ -309,16 +309,25 @@ function xml_status($st, $message = '') {
 if(! function_exists('http_status_exit')) {
-function http_status_exit($val) {
+function http_status_exit($val, $description = array()) {
 	$err = '';
-	if($val >= 400)
+	if($val >= 400) {
 		$err = 'Error';
+		if (!isset($description["title"]))
+			$description["title"] = $err." ".$val;
+	}
 	if($val >= 200 && $val < 300)
 		$err = 'OK';
 	logger('http_status_exit ' . $val);
 	header($_SERVER["SERVER_PROTOCOL"] . ' ' . $val . ' ' . $err);
+	if (isset($description["title"])) {
+		$tpl = get_markup_template('http_status.tpl');
+		echo replace_macros($tpl, array('$title' => $description["title"],
+				'$description' => $description["description"]));
+	}
 	killme();
 }}
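
The change above extends http_status_exit() to take an optional $description
array with "title" and "description" keys, rendered through the http_status.tpl
template before the process exits; callers that pass nothing keep the old
bare-header behaviour. A minimal usage sketch (this 404 call is hypothetical,
mirroring the pattern the search module uses below):

	// Hypothetical caller, not part of this commit: sends the status line,
	// renders http_status.tpl with the given title/description, then exits.
	http_status_exit(404,
			array("title" => t("Page not found."),
				"description" => t("The requested resource does not exist on this server.")));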

@@ -95,10 +95,29 @@ function search_content(&$a) {
 	}
 	if(get_config('system','local_search') AND !local_user()) {
-		notice(t('Public access denied.').EOL);
-		return;
-		//http_status_exit(403);
-		//killme();
+		http_status_exit(403,
+				array("title" => t("Public access denied."),
+					"description" => t("Only logged in users are permitted to perform a search.")));
+		killme();
+		//notice(t('Public access denied.').EOL);
+		//return;
 	}
+	if (get_config('system','permit_crawling') AND !local_user()) {
+		// To-Do:
+		// - 10 requests are "free", after the 11th only a call per minute is allowed
+		$remote = $_SERVER["REMOTE_ADDR"];
+		$result = Cache::get("remote_search:".$remote);
+		if (!is_null($result)) {
+			if ($result > (time() - 60)) {
+				http_status_exit(429,
+						array("title" => t("Too Many Requests"),
+							"description" => t("Only one search per minute is permitted for not logged in users.")));
+				killme();
+			}
+		}
+		Cache::set("remote_search:".$remote, time(), CACHE_HOUR);
+	}
 	nav_set_selected('search');
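
The committed check is stricter than the To-Do comment announces: every search
by a not-logged-in user is limited to one per minute per remote address, with
the timestamp cached for an hour. A sketch of the "10 free requests" scheme
from the To-Do, reusing the Cache::get()/Cache::set() calls shown above (the
remote_search_count key and this code are hypothetical, not part of the commit):

	// Hypothetical follow-up implementing the To-Do above.
	$remote = $_SERVER["REMOTE_ADDR"];
	$count = Cache::get("remote_search_count:".$remote);
	$count = is_null($count) ? 0 : intval($count);
	if ($count >= 10) {
		// From the 11th request on, fall back to one call per minute.
		$last = Cache::get("remote_search:".$remote);
		if (!is_null($last) AND ($last > (time() - 60)))
			http_status_exit(429,
					array("title" => t("Too Many Requests"),
						"description" => t("Only one search per minute is permitted for not logged in users.")));
	}
	Cache::set("remote_search_count:".$remote, $count + 1, CACHE_HOUR);
	Cache::set("remote_search:".$remote, time(), CACHE_HOUR);

Keying on REMOTE_ADDR is simple but coarse: clients behind a shared NAT or
proxy all land in the same bucket.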