Filename |
---|
src/controllers/ResourceController.php |
src/controllers/components/SocialComponent.php |
src/library/WikiParser.php |
src/views/CrawlstatusView.php |
src/views/elements/ManagecrawlsElement.php |
```diff
diff --git a/src/controllers/ResourceController.php b/src/controllers/ResourceController.php
index 604d4319e..8e49f2039 100644
--- a/src/controllers/ResourceController.php
+++ b/src/controllers/ResourceController.php
@@ -304,7 +304,7 @@ class ResourceController extends Controller implements CrawlConstants
         $this->recordViewSession($page_id, $sub_path, $media_name);
         $prefix_word = (isset($_REQUEST['t'])) ? 't' : '';
         $base_subfolder = L\crawlHash(
-            'group' . $group_id. $page_id . C\AUTH_KEY);
+            'group' . $group_id . $page_id . C\AUTH_KEY);
         $prefix_folder = substr($base_subfolder, 0, 3);
         $subfolder = $prefix_word . $base_subfolder;
         $add_to_path = true;
diff --git a/src/controllers/components/SocialComponent.php b/src/controllers/components/SocialComponent.php
index 8c4601a2a..c236cfc88 100644
--- a/src/controllers/components/SocialComponent.php
+++ b/src/controllers/components/SocialComponent.php
@@ -2098,7 +2098,7 @@ class SocialComponent extends Component implements CrawlConstants
             $page[self::DESCRIPTION] = $parser->parse($description);
             $page[self::DESCRIPTION] =
                 $group_model->insertResourcesParsePage($item['GROUP_ID'],
-                "post".$item['ID'],
+                "post" . $item['ID'],
                 $locale_tag, $page[self::DESCRIPTION]);
             $page[self::DESCRIPTION] = preg_replace('/\[{token}\]/',
                 $csrf_token, $page[self::DESCRIPTION]);
@@ -2866,7 +2866,13 @@ class SocialComponent extends Component implements CrawlConstants
         if (!empty($_REQUEST['clear']) && !empty($_SESSION['seen_media'])
             && is_array($_SESSION['seen_media'])) {
             $media_name = $parent->clean($_REQUEST['clear'], 'file_name');
-            $hash_id = L\crawlHash($data["PAGE_ID"]. $media_name . $sub_path);
+            $type = UrlParser::getDocumentType($media_name);
+            if ($type != "") {
+                $media_name = UrlParser::getDocumentFilename($media_name);
+                $media_name = urlencode($media_name);
+                $media_name = "$media_name.$type";
+            }
+            $hash_id = L\crawlHash($data["PAGE_ID"] . $media_name . $sub_path);
             if (in_array($hash_id, $_SESSION['seen_media'])) {
                 $_SESSION['seen_media'] = array_diff(
                     $_SESSION['seen_media'], [$hash_id]);
@@ -3670,9 +3676,9 @@ EOD;
             $page_string .= "<div class='media-container'>";
             if (!empty($sub_path)) {
                 $page_string .= "((resource:$media_name|$sub_path".
-                    "|$file_name))";
+                    "|$file_name ))";
             } else {
-                $page_string .= "((resource:$media_name|$file_name))";
+                $page_string .= "((resource:$media_name|$file_name ))";
             }
             $page_string .= "</div>";
             $include_charts_and_spreadsheets = ($mime_type == 'text/csv') ?
diff --git a/src/library/WikiParser.php b/src/library/WikiParser.php
index 0437439cb..9801ae4f0 100644
--- a/src/library/WikiParser.php
+++ b/src/library/WikiParser.php
@@ -307,7 +307,7 @@ class WikiParser implements CrawlConstants
             $semi_pos = (strpos($line, ";")) ? strpos($line, ";"):
                 strlen($line);
             $line = substr($line, 0, $semi_pos);
-            $line_parts = explode("=",$line);
+            $line_parts = explode("=", $line);
             if (count($line_parts) == 2) {
                 $head_vars[trim(addslashes($line_parts[0]))] =
                     addslashes(trim($line_parts[1]));
diff --git a/src/views/CrawlstatusView.php b/src/views/CrawlstatusView.php
index 19bda48b0..8a4f4aeb7 100755
--- a/src/views/CrawlstatusView.php
+++ b/src/views/CrawlstatusView.php
@@ -380,7 +380,7 @@ class CrawlstatusView extends View
             C\CSRF_TOKEN . "=" .
             $data[C\CSRF_TOKEN] ?>"><?= tl('crawlstatus_view_options')
             ?></a><?= " " .$this->helper("helpbutton")->render(
-            "Start Crawl", $data[C\CSRF_TOKEN]) ?>
+            "New Crawl", $data[C\CSRF_TOKEN]) ?>
         </p>
         </form><?php
     }
diff --git a/src/views/elements/ManagecrawlsElement.php b/src/views/elements/ManagecrawlsElement.php
index 52d2d83fd..e76bab605 100755
--- a/src/views/elements/ManagecrawlsElement.php
+++ b/src/views/elements/ManagecrawlsElement.php
@@ -89,7 +89,7 @@ class ManagecrawlsElement extends Element
     function doUpdate()
     {
         var sec = 1000;
-        var minute = 60*sec;
+        var minute = 60 * sec;
         crawlStatusUpdate();
         updateId = setInterval("crawlStatusUpdate()", 30*sec);
         setTimeout("clearUpdate()", 20*minute + sec);
```
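Most of the hunks above are cosmetic spacing fixes around `.` concatenation and `*`; the one behavioral change is the `seen_media` hunk in `SocialComponent.php`, which URL-encodes the base of the cleared media name and re-attaches its extension before hashing, presumably so the computed `$hash_id` matches the encoded name form recorded in `$_SESSION['seen_media']` when the resource was viewed. A minimal standalone sketch of that normalization, using PHP's `pathinfo()` in place of Yioop's `UrlParser::getDocumentType()` and `UrlParser::getDocumentFilename()` helpers (which may differ on unusual names); the function name `normalizeMediaName` is hypothetical, not a Yioop API:

```php
<?php
// Sketch only: pathinfo() approximates UrlParser::getDocumentType() /
// UrlParser::getDocumentFilename(); normalizeMediaName() is not a Yioop API.
function normalizeMediaName(string $media_name): string
{
    $type = pathinfo($media_name, PATHINFO_EXTENSION);   // e.g. "mp4"
    if ($type != "") {
        // URL-encode only the base name, then re-attach the extension,
        // mirroring the new branch in the diff above.
        $base = pathinfo($media_name, PATHINFO_FILENAME);
        $media_name = urlencode($base) . "." . $type;
    }
    return $media_name;
}

echo normalizeMediaName("my video.mp4"), "\n"; // my+video.mp4
```

Under that reading, a raw name such as `my video.mp4` from `$_REQUEST['clear']` now hashes to the same `$hash_id` that was stored for its encoded form, so the `array_diff()` removal actually finds the entry.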