- new cronjob for history/map/x.log file truncate, closed #734
@@ -56,6 +56,9 @@ deleteExpiredCacheData = Cron\Cache->deleteExpiredData, @downtime
 ; delete old statistics (activity log) data
 deleteStatisticsData = Cron\StatisticsUpdate->deleteStatisticsData, @weekly
 
+; truncate map history log files
+truncateMapHistoryLogFiles = Cron\MapHistory->truncateFiles, @halfHour
+
 ; updates small amount of static system data from CCP API
 ;updateUniverseSystems = Cron\Universe->updateUniverseSystems, @instant
 
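Note: @halfHour (like @downtime above) is not a stock cron preset, so Pathfinder presumably registers it in its cron bootstrap. With the F3 Cron plugin this job syntax matches, a custom preset registration would look roughly like this (the preset name is taken from the config above; the schedule expression is an assumption):

    $cron = \Cron::instance();
    // assumed expression: run at minute 0 and 30 of every hour
    $cron->preset('halfHour', '0,30 * * * *');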
app/main/cron/maphistory.php (new file, 125 lines)
@@ -0,0 +1,125 @@
+<?php
+/**
+ * Created by PhpStorm.
+ * User: Exodus 4D
+ * Date: 22.12.2018
+ * Time: 15:48
+ */
+
+namespace Cron;
+
+use data\mapper\SortingIterator;
+use data\file\FileHandler;
+use data\filesystem\Search;
+
+class MapHistory extends AbstractCron {
+
+    const LOG_TEXT = '%s [%4s] log files, [%4s] not writable, [%4s] read error, [%4s] write error, [%4s] rename error, [%4s] delete error, exec (%.3Fs)';
+
+    /**
+     * default log file size limit before truncate, bytes (1MB)
+     */
+    const LOG_SIZE_THRESHOLD = 1024 * 1024;
+
+    /**
+     * default count of log files that will be truncated
+     */
+    const LOG_COUNT = 3;
+
+    /**
+     * default line limit after truncate
+     */
+    const LOG_LINES = 1000;
+
+    /**
+     * get max log size threshold before truncate
+     * @param \Base $f3
+     * @return int
+     */
+    protected function getMaxLogSize(\Base $f3) : int {
+        $logSize = (int)$f3->get('PATHFINDER.HISTORY.LOG_SIZE_THRESHOLD');
+        return ($logSize >= 0) ? ($logSize * 1024 * 1024) : self::LOG_SIZE_THRESHOLD;
+    }
+
+    /**
+     * get max log entries (lines) after truncate
+     * @param \Base $f3
+     * @return int
+     */
+    protected function getMaxLogLines(\Base $f3) : int {
+        $logLines = (int)$f3->get('PATHFINDER.HISTORY.LOG_LINES');
+        return ($logLines >= 0) ? $logLines : self::LOG_LINES;
+    }
+
+    /**
+     * truncate map history log files and keep size small
+     * >> php index.php "/cron/truncateMapHistoryLogFiles"
+     * @param \Base $f3
+     */
+    function truncateFiles(\Base $f3){
+        $timeStart = microtime(true);
+
+        $largeFiles = 0;
+        $notWritableFiles = 0;
+        $readErrors = 0;
+        $writeErrors = 0;
+        $renameErrors = 0;
+        $deleteErrors = 0;
+
+        if($f3->exists('PATHFINDER.HISTORY.LOG', $dir)){
+            $fileHandler = FileHandler::instance();
+
+            $dir = $f3->fixslashes('./' . $dir . 'map/');
+            $files = Search::getFilesBySize($dir, $this->getMaxLogSize($f3));
+
+            // sort by file size
+            $files = new SortingIterator($files, function( \SplFileInfo $a, \SplFileInfo $b){
+                return $b->getSize() - $a->getSize();
+            });
+
+            // limit files count for truncate
+            $files = new \LimitIterator($files, 0, self::LOG_COUNT);
+
+            foreach($files as $filename => $file){
+                /**
+                 * @var $file \SplFileInfo
+                 */
+                if($file->isFile()){
+                    $largeFiles++;
+                    if($file->isWritable()){
+                        // read newest logs from large files (reverse order) -> new log entries were appended...
+                        $rowsData = $fileHandler->readFileReverse($file->getRealPath(), 0, self::LOG_LINES);
+                        if(!empty($rowsData)){
+                            // create temp file...
+                            $temp = tempnam(sys_get_temp_dir(), 'map_');
+                            // write newest logs into temp file...
+                            $fileSizeNew = file_put_contents($temp, implode(PHP_EOL, array_reverse($rowsData)) . PHP_EOL, LOCK_EX);
+                            if($fileSizeNew){
+                                // move temp file from PHP temp dir into Pathfinder's history log dir...
+                                // ... overwrite old log file with new file
+                                if(rename($temp, $file->getRealPath())){
+                                    // map history logs should be writable by non-cronjob users too
+                                    @chmod($file->getRealPath(), 0666);
+                                }else{
+                                    $renameErrors++;
+                                }
+                            }else{
+                                $writeErrors++;
+                            }
+                        }else{
+                            $readErrors++;
+                        }
+                    }else{
+                        $notWritableFiles++;
+                    }
+                }
+            }
+        }
+
+        $execTime = microtime(true) - $timeStart;
+
+        // Log ------------------------
+        $log = new \Log('cron_' . __FUNCTION__ . '.log');
+        $log->write(sprintf(self::LOG_TEXT, __FUNCTION__, $largeFiles, $notWritableFiles, $readErrors, $writeErrors, $renameErrors, $deleteErrors, $execTime));
+    }
+}
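In short, truncateFiles() applies a read-tail / rewrite / swap pattern to each oversized log. A minimal standalone sketch of that pattern (hypothetical file path, cron defaults inlined):

    $source    = './history/map/example.log'; // hypothetical path
    $keepLines = 1000;

    // log entries are appended, so reading in reverse yields the newest lines first
    $rows = FileHandler::instance()->readFileReverse($source, 0, $keepLines);
    if(!empty($rows)){
        // write them back in chronological order to a temp file...
        $temp = tempnam(sys_get_temp_dir(), 'map_');
        if(file_put_contents($temp, implode(PHP_EOL, array_reverse($rows)) . PHP_EOL, LOCK_EX)){
            // ...then swap the temp file over the oversized original
            rename($temp, $source);
            @chmod($source, 0666); // keep the log writable for non-cron users
        }
    }

The rename() step can still fail (e.g. target directory permissions), which is why the job counts rename errors separately in its summary log line.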
@@ -34,7 +34,7 @@ class Universe extends AbstractCron {
     }
 
     /**
-     * format Byte §size for output
+     * format Byte $size for output
      * @param int $size
      * @return string
      */
@@ -21,20 +21,19 @@ class FileHandler extends \Prefab {
     const LOG_FILE_LIMIT_MAX = 100;
 
     /**
-     * parse local log file from end to first line
-     * -> Each row is a JSON object
+     * parse file from end to first line
      * @param string $sourceFile
      * @param int $offset
     * @param int $limit
-     * @param null|callable $formatter
+     * @param \Closure|null $rowParser
      * @return array
      */
-    public static function readLogFile(
+    public function readFileReverse(
         string $sourceFile,
         int $offset = self::LOG_FILE_OFFSET,
         int $limit = self::LOG_FILE_LIMIT,
-        $formatter = null
-    ): array {
+        \Closure $rowParser = null
+    ) : array {
         $data = [];
 
         if(is_file($sourceFile)){
@@ -43,11 +42,11 @@ class FileHandler extends \Prefab {
             $file->setFlags(\SplFileObject::DROP_NEW_LINE | \SplFileObject::READ_AHEAD | \SplFileObject::SKIP_EMPTY);
 
             foreach( new \LimitIterator($file, 0, $limit) as $i => $rowData){
-                if( !empty($rowDataObj = (array)json_decode($rowData, true)) ){
-                    if(is_callable($formatter)){
-                        $formatter($rowDataObj);
-                    }
-                    $data[] = $rowDataObj;
+                if(is_callable($rowParser)){
+                    // custom parser for row data -> manipulate $data by ref
+                    $rowParser($rowData, $data);
+                }else{
+                    $data[] = $rowData;
                 }
             }
         }else{
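The reworked method is now format agnostic: with no $rowParser it collects raw lines (newest first); a parser closure can decode and filter rows itself, manipulating $data by reference. A usage sketch (hypothetical log path):

    // read the last 50 rows of a JSON-per-line log, decoded to arrays
    $rows = FileHandler::instance()->readFileReverse(
        './history/map/example.log', // hypothetical path
        0,  // row offset, counted from the end of the file
        50, // row limit
        function(string &$rowData, array &$data){
            if( !empty($rowDataObj = (array)json_decode($rowData, true)) ){
                $data[] = $rowDataObj;
            }
        }
    );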
@@ -12,58 +12,79 @@ namespace data\filesystem;
 class Search {
 
     /**
-     * max file count that should be deleted in this session
+     * max file count that can be returned
      */
     const DEFAULT_FILE_LIMIT = 1000;
 
-    /**
-     * timestamp (seconds) filter files by mTime()
-     * -> default = "no filter"
-     * @var int
-     */
-    static $filterTime = 0;
-
     /**
      * recursive file filter by mTime
      * @param string $dir
-     * @param int $mTime
+     * @param null $mTime
      * @param int $limit
-     * @return array|\LimitIterator
+     * @return \Traversable
      */
-    static function getFilesByMTime(string $dir, $mTime = null, $limit = self::DEFAULT_FILE_LIMIT){
-        $files = [];
-
-        if(is_dir($dir)){
-            if(is_null($mTime)){
-                self::$filterTime = time();
-            }else{
-                self::$filterTime = (int)$mTime;
-            }
-
-            $directory = new \RecursiveDirectoryIterator( $dir, \FilesystemIterator::SKIP_DOTS );
-            $files = new \RecursiveCallbackFilterIterator($directory, function ($current, $key, $iterator) {
-
-                // Check for last modification date
-                /**
-                 * @var $current \RecursiveDirectoryIterator
-                 */
-                if (
-                    !$current->isFile() || // allow recursion
-                    (
-                        strpos($current->getFilename(), '.') !== 0 && // skip e.g. ".gitignore"
-                        $current->getMTime() < self::$filterTime // check last modification date
-                    )
-                ){
-                    return true;
-                }
-                return false;
-            });
-
-            // limit max files
-            $files = new \LimitIterator($files, 0, $limit);
-        }
-
-        return $files;
+    static function getFilesByMTime(string $dir, $mTime = null, $limit = self::DEFAULT_FILE_LIMIT) : \Traversable {
+        $mTime = is_null($mTime) ? time() : (int)$mTime;
+
+        $filterCallback = function($current, $key, $iterator) use ($mTime) {
+            /**
+             * @var $current \RecursiveDirectoryIterator
+             */
+            if (
+                !$current->isFile() || // allow recursion
+                (
+                    strpos($current->getFilename(), '.') !== 0 && // skip e.g. ".gitignore"
+                    $current->getMTime() < $mTime // filter last modification date
+                )
+            ){
+                return true;
+            }
+            return false;
+        };
+
+        return self::getFilesByCallback($dir, $filterCallback, $limit);
     }
 
+    /**
+     * recursive file filter by size
+     * @param string $dir
+     * @param int $size
+     * @param int $limit
+     * @return \Traversable
+     */
+    static function getFilesBySize(string $dir, int $size = 0, int $limit = self::DEFAULT_FILE_LIMIT) : \Traversable {
+
+        $filterCallback = function($current, $key, $iterator) use ($size) {
+            /**
+             * @var $current \RecursiveDirectoryIterator
+             */
+            if (
+                !$current->isFile() || // allow recursion
+                (
+                    strpos($current->getFilename(), '.') !== 0 && // skip e.g. ".gitignore"
+                    $current->getSize() > $size // filter file size
+                )
+            ){
+                return true;
+            }
+            return false;
+        };
+
+        return self::getFilesByCallback($dir, $filterCallback, $limit);
+    }
+
+    /**
+     * @param string $dir
+     * @param \Closure $filterCallback
+     * @param int $limit
+     * @return \Traversable
+     */
+    private static function getFilesByCallback(string $dir, \Closure $filterCallback, int $limit = self::DEFAULT_FILE_LIMIT) : \Traversable {
+        $files = new \ArrayIterator();
+        if(is_dir($dir)){
+            $directory = new \RecursiveDirectoryIterator( $dir, \FilesystemIterator::SKIP_DOTS );
+            $files = new \RecursiveCallbackFilterIterator($directory, $filterCallback);
+        }
+        return new \LimitIterator($files, 0, $limit);
+    }
 }
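Both public helpers return lazy \SplFileInfo iterators, and since the filter callbacks let directories through (to allow recursion), callers still need an isFile() check. For example, with getFilesByMTime() (hypothetical directory and age threshold):

    // files under ./cache/ that were last modified more than 7 days ago
    $files = Search::getFilesByMTime('./cache/', strtotime('-7 days'));
    foreach($files as $file){
        /**
         * @var $file \SplFileInfo
         */
        if($file->isFile()){
            @unlink($file->getRealPath()); // e.g. delete the expired file
        }
    }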
app/main/data/mapper/sortingiterator.php (new file, 20 lines)
@@ -0,0 +1,20 @@
+<?php
+/**
+ * Created by PhpStorm.
+ * User: Exodus 4D
+ * Date: 24.12.2018
+ * Time: 00:55
+ */
+
+namespace data\mapper;
+
+
+class SortingIterator extends \ArrayIterator {
+
+    public function __construct(\Traversable $iterator, callable $callback){
+        parent::__construct(iterator_to_array($iterator));
+
+        // sort by custom function
+        $this->uasort($callback);
+    }
+}
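SortingIterator simply materializes any \Traversable via iterator_to_array() and sorts the result with uasort(), which is how the new cron job orders oversized logs before truncating the biggest ones. Combined with the Search helper above (values mirror the cron defaults):

    use data\filesystem\Search;
    use data\mapper\SortingIterator;

    $files = Search::getFilesBySize('./history/map/', 1024 * 1024); // files > 1MB
    $files = new SortingIterator($files, function(\SplFileInfo $a, \SplFileInfo $b){
        return $b->getSize() - $a->getSize(); // largest first
    });
    foreach(new \LimitIterator($files, 0, 3) as $file){
        // ... truncate $file
    }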
@@ -420,6 +420,9 @@ abstract class AbstractLog implements LogInterface {
         $params = [];
         if( !empty($conf = $this->handlerParamsConfig['stream']) ){
             $params[] = $conf->stream;
+            $params[] = Logger::toMonologLevel($this->getLevel()); // min level that is handled
+            $params[] = true; // bubble
+            $params[] = 0666; // permissions (default 644)
         }
 
         return $params;
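These positional $params are what a Monolog StreamHandler gets constructed with; in Monolog 1.x the signature is __construct($stream, $level = Logger::DEBUG, $bubble = true, $filePermission = null, $useLocking = false). Spelled out with illustrative values:

    use Monolog\Logger;
    use Monolog\Handler\StreamHandler;

    $handler = new StreamHandler(
        './history/map/example.log',    // $conf->stream (hypothetical path)
        Logger::toMonologLevel('info'), // min level that is handled
        true,                           // bubble
        0666                            // file permission, so cron and web server users can both write
    );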
@@ -1037,6 +1037,10 @@ class MapModel extends AbstractMapTrackingModel {
         ];
     }
 
+    /**
+     * map log formatter callback
+     * @return \Closure
+     */
     protected function getLogFormatter(){
         return function(&$rowDataObj){
             unset($rowDataObj['extra']);
@@ -1258,9 +1262,18 @@ class MapModel extends AbstractMapTrackingModel {
      * @param int $limit
      * @return array
      */
-    public function getLogData(int $offset = FileHandler::LOG_FILE_OFFSET, int $limit = FileHandler::LOG_FILE_LIMIT): array {
+    public function getLogData(int $offset = FileHandler::LOG_FILE_OFFSET, int $limit = FileHandler::LOG_FILE_LIMIT) : array {
         $streamConf = $this->getStreamConfig();
-        return FileHandler::readLogFile($streamConf->stream, $offset, $limit, $this->getLogFormatter());
+
+        $rowFormatter = $this->getLogFormatter();
+        $rowParser = function(string &$rowData, array &$data) use ($rowFormatter){
+            if( !empty($rowDataObj = (array)json_decode($rowData, true)) ){
+                $rowFormatter($rowDataObj);
+                $data[] = $rowDataObj;
+            }
+        };
+
+        return FileHandler::instance()->readFileReverse($streamConf->stream, $offset, $limit, $rowParser);
     }
 
     /**
@@ -353,6 +353,16 @@ CACHE = 5
 ; Default: history/
 LOG = history/
 
+; Max file size for 'history' logs before they get truncated by the cronjob
+; Syntax: Integer (MB)
+; Default: 2
+LOG_SIZE_THRESHOLD = 2
+
+; Log entries (lines) kept after a file gets truncated by the cronjob
+; Syntax: Integer
+; Default: 1000
+LOG_LINES = 1000
+
 ; ADMIN ===========================================================================================
 ; "SUPER" admins and additional "CORPORATION" admins can be added here
 ;[PATHFINDER.ROLES]
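Note how LOG_SIZE_THRESHOLD ties back to MapHistory::getMaxLogSize() above: the value is read as MB and converted to bytes, so the default of 2 makes a log file a truncate candidate once it exceeds 2 * 1024 * 1024 = 2097152 bytes.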