make it harder to create duplicate globaldir entries
This commit is contained in:
parent 9d01103af7
commit 574f074dc0

2 changed files with 19 additions and 5 deletions
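In outline: apart from an apparently whitespace-only touch to post_remote(), the diff changes submit_content() in two ways. The profile lookup now matches on both `homepage` and the normalized `nurl`, keeps the newest matching row, and deletes one older duplicate (plus its photo) per submission; and the early-exit path for hidden or incomplete profiles now returns to the caller instead of calling exit.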
@@ -241,3 +241,4 @@ function post_remote($a,$arr) {
     return $current_post;
 }
 
+
@@ -31,14 +31,15 @@ function submit_content(&$a) {
     $parms = scrape_dfrn($url);
 
 
-    if((! count($parms)) || (validate_dfrn($parms)))
+    if((! count($parms)) || (validate_dfrn($parms))) {
         exit;
+    }
 
     if((x($parms,'hide')) || (! (x($parms,'fn')) && (x($parms,'photo')))) {
         if($profile_exists) {
             nuke_record($url);
         }
-        exit;
+        return;
     }
 
     $photo = $parms['photo'];
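A note on the hunk above: the hidden/incomplete-profile branch now ends with return; instead of exit;. A minimal sketch of why that matters, assuming (this is not shown in the diff) that a dispatcher keeps running after the module callback returns; all names below are hypothetical:

<?php
// Hypothetical names throughout; only the return-vs-exit behavior is the point.

function submit_content_sketch(bool $profile_exists): void {
    if ($profile_exists) {
        // nuke_record($url) would run here in the real module
    }
    return;   // hands control back to the dispatcher below
    // exit;  // the old code stopped PHP here, so nothing after the call ran
}

function dispatch(): void {
    submit_content_sketch(true);
    // With return this line still executes (logging, page rendering, cleanup);
    // with exit it never would.
    echo "request completed\n";
}

dispatch();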
@@ -103,11 +104,23 @@ function submit_content(&$a) {
     );
     logger('Insert returns: ' . $r);
 
-    $r = q("SELECT `id` FROM `profile` WHERE `homepage` = '%s' LIMIT 1",
-        dbesc($url)
+    $r = q("SELECT `id` FROM `profile` WHERE ( `homepage` = '%s' or `nurl` = '%s' ) order by id asc",
+        dbesc($url),
+        dbesc($nurl)
     );
 
     if(count($r))
-        $profile_id = $r[0]['id'];
+        $profile_id = $r[count($r) - 1]['id'];
+
+    if(count($r) > 1) {
+        q("DELETE FROM `photo` WHERE `profile-id` = %d LIMIT 1",
+            intval($r[0]['id'])
+        );
+        q("DELETE FROM `profile` WHERE `id` = %d LIMIT 1",
+            intval($r[0]['id'])
+        );
+
+
+    }
 
     if($parms['tags']) {
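This hunk is the heart of the commit: the lookup now matches on `homepage` or the normalized `nurl` rather than `homepage` alone, keeps the newest matching row (highest id), and when more than one row matches deletes the oldest duplicate together with its photo. A standalone sketch of that strategy, using PDO in place of the repo's q()/dbesc() wrappers; the table and column names come from the diff, everything else is illustrative:

<?php
// Illustrative only: PDO stands in for the project's q()/dbesc() helpers.

function dedup_profiles(PDO $db, string $url, string $nurl): ?int {
    $stmt = $db->prepare(
        "SELECT `id` FROM `profile`
         WHERE (`homepage` = ? OR `nurl` = ?)
         ORDER BY `id` ASC"
    );
    $stmt->execute([$url, $nurl]);
    $rows = $stmt->fetchAll(PDO::FETCH_ASSOC);
    if (!$rows) {
        return null;
    }

    // Keep the most recent entry: highest id, i.e. the last row in ASC order.
    $last = end($rows);
    $keep = (int) $last['id'];

    // With more than one match, drop one stale duplicate per pass, oldest
    // first, mirroring the LIMIT 1 deletes in the diff above.
    if (count($rows) > 1) {
        $oldest = (int) $rows[0]['id'];
        $db->prepare("DELETE FROM `photo` WHERE `profile-id` = ? LIMIT 1")
           ->execute([$oldest]);
        $db->prepare("DELETE FROM `profile` WHERE `id` = ? LIMIT 1")
           ->execute([$oldest]);
    }
    return $keep;
}

Because only one stale row is removed per submission, repeated duplicates converge toward a single entry over successive submissions, which fits the commit message's "make it harder" framing rather than a one-shot purge.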