diff --git a/app/cron.ini b/app/cron.ini index 3d37ac96..08671e47 100644 --- a/app/cron.ini +++ b/app/cron.ini @@ -56,6 +56,9 @@ deleteExpiredCacheData = Cron\Cache->deleteExpiredData, @downtime ; delete old statistics (activity log) data deleteStatisticsData = Cron\StatisticsUpdate->deleteStatisticsData, @weekly +; truncate map history log files +truncateMapHistoryLogFiles = Cron\MapHistory->truncateFiles, @halfHour + ; updates small amount of static system data from CCP API ;updateUniverseSystems = Cron\Universe->updateUniverseSystems, @instant diff --git a/app/main/cron/maphistory.php b/app/main/cron/maphistory.php new file mode 100644 index 00000000..330c6a58 --- /dev/null +++ b/app/main/cron/maphistory.php @@ -0,0 +1,125 @@ +get('PATHFINDER.HISTORY.LOG_SIZE_THRESHOLD'); + return ($logSize >= 0) ? ($logSize * 1024 * 1024) : self::LOG_SIZE_THRESHOLD; + } + + /** + * get max log entries (lines) after truncate + * @param \Base $f3 + * @return int + */ + protected function getMaxLogLines(\Base $f3) : int { + $logLines = (int)$f3->get('PATHFINDER.HISTORY.LOG_LINES'); + return ($logLines >= 0) ? $logLines : self::LOG_LINES; + } + + /** + * truncate map history log files and keep size small + * >> php index.php "/cron/truncateMapHistoryLogFiles" + * @param \Base $f3 + */ + function truncateFiles(\Base $f3){ + $timeStart = microtime(true); + + $largeFiles = 0; + $notWritableFiles = 0; + $readErrors = 0; + $writeErrors = 0; + $renameErrors = 0; + $deleteErrors = 0; + + if($f3->exists('PATHFINDER.HISTORY.LOG', $dir)){ + $fileHandler = FileHandler::instance(); + + $dir = $f3->fixslashes('./' . $dir . 
'map/'); + $files = Search::getFilesBySize($dir, $this->getMaxLogSize($f3)); + + // sort by file size + $files = new SortingIterator($files, function( \SplFileInfo $a, \SplFileInfo $b){ + return $b->getSize() - $a->getSize(); + }); + + // limit files count for truncate + $files = new \LimitIterator($files, 0, self::LOG_COUNT); + + foreach($files as $filename => $file){ + /** + * @var $file \SplFileInfo + */ + if($file->isFile()){ + $largeFiles++; + if($file->isWritable()){ + // read newest logs from large files (reverse order) -> new log entries were appended... + $rowsData = $fileHandler->readFileReverse($file->getRealPath(), 0, self::LOG_LINES); + if(!empty($rowsData)){ + // create temp file... + $temp = tempnam(sys_get_temp_dir(), 'map_'); + // write newest logs into temp file... + $fileSizeNew = file_put_contents($temp, implode(PHP_EOL, array_reverse($rowsData)) . PHP_EOL, LOCK_EX); + if($fileSizeNew){ + // move temp file from PHP temp dir into Pathfinders history log dir... + // ... overwrite old log file with new file + if(rename($temp, $file->getRealPath())){ + // map history logs should be writable non cronjob user too + @chmod($file->getRealPath(), 0666); + }else{ + $renameErrors++; + } + }else{ + $writeErrors++; + } + }else{ + $readErrors++; + } + }else{ + $notWritableFiles++; + } + } + } + } + + $execTime = microtime(true) - $timeStart; + + // Log ------------------------ + $log = new \Log('cron_' . __FUNCTION__ . 
'.log'); + $log->write(sprintf(self::LOG_TEXT, __FUNCTION__, $largeFiles, $notWritableFiles, $readErrors, $writeErrors, $renameErrors, $deleteErrors, $execTime)); + } +} \ No newline at end of file diff --git a/app/main/cron/universe.php b/app/main/cron/universe.php index 7ff0613a..52bf8776 100644 --- a/app/main/cron/universe.php +++ b/app/main/cron/universe.php @@ -34,7 +34,7 @@ class Universe extends AbstractCron { } /** - * format Byte §size for output + * format Byte $size for output * @param int $size * @return string */ diff --git a/app/main/data/file/filehandler.php b/app/main/data/file/filehandler.php index be313301..739a3d61 100644 --- a/app/main/data/file/filehandler.php +++ b/app/main/data/file/filehandler.php @@ -21,20 +21,19 @@ class FileHandler extends \Prefab { const Log_File_LIMIT_MAX = 100; /** - * parse local log file from end to first line - * -> Each row is a JSON object + * parse file from end to first line * @param string $sourceFile * @param int $offset * @param int $limit - * @param null|callable $formatter + * @param \Closure|null $rowParser * @return array */ - public static function readLogFile( + public function readFileReverse( string $sourceFile, int $offset = self::LOG_FILE_OFFSET, int $limit = self::LOG_FILE_LIMIT, - $formatter = null - ): array { + \Closure $rowParser = null + ) : array { $data = []; if(is_file($sourceFile)){ @@ -43,11 +42,11 @@ class FileHandler extends \Prefab { $file->setFlags(\SplFileObject::DROP_NEW_LINE | \SplFileObject::READ_AHEAD | \SplFileObject::SKIP_EMPTY); foreach( new \LimitIterator($file, 0, $limit) as $i => $rowData){ - if( !empty($rowDataObj = (array)json_decode($rowData, true)) ){ - if(is_callable($formatter)){ - $formatter($rowDataObj); - } - $data[] = $rowDataObj; + if(is_callable($rowParser)){ + // custom parser for row data -> manipulate $data by ref + $rowParser($rowData, $data); + }else{ + $data[] = $rowData; } } }else{ diff --git a/app/main/data/filesystem/search.php 
b/app/main/data/filesystem/search.php index d94958fe..8037b1f5 100644 --- a/app/main/data/filesystem/search.php +++ b/app/main/data/filesystem/search.php @@ -12,58 +12,79 @@ namespace data\filesystem; class Search { /** - * max file count that should be deleted in this session + * max file count that can be returned */ const DEFAULT_FILE_LIMIT = 1000; - /** - * timestamp (seconds) filter files by mTime() - * -> default = "no filter" - * @var int - */ - static $filterTime = 0; - /** * recursive file filter by mTime * @param string $dir - * @param int $mTime + * @param null $mTime * @param int $limit - * @return array|\LimitIterator + * @return \Traversable */ - static function getFilesByMTime(string $dir, $mTime = null, $limit = self::DEFAULT_FILE_LIMIT){ - $files = []; + static function getFilesByMTime(string $dir, $mTime = null, $limit = self::DEFAULT_FILE_LIMIT) : \Traversable { + $mTime = is_null($mTime) ? time() : (int)$mTime; - if(is_dir($dir)){ - if(is_null($mTime)){ - self::$filterTime = time(); - }else{ - self::$filterTime = (int)$mTime; + $filterCallback = function($current, $key, $iterator) use ($mTime) { + /** + * @var $current \RecursiveDirectoryIterator + */ + if ( + !$current->isFile() || // allow recursion + ( + strpos($current->getFilename(), '.') !== 0 && // skip e.g. ".gitignore" + $current->getMTime() < $mTime // filter last modification date + ) + ){ + return true; } + return false; + }; - $directory = new \RecursiveDirectoryIterator( $dir, \FilesystemIterator::SKIP_DOTS ); - $files = new \RecursiveCallbackFilterIterator($directory, function ($current, $key, $iterator) { - - // Check for last modification date - /** - * @var $current \RecursiveDirectoryIterator - */ - if ( - !$current->isFile() || // allow recursion - ( - strpos($current->getFilename(), '.') !== 0 && // skip e.g. 
".gitignore" - $current->getMTime() < self::$filterTime // check last modification date - ) - ){ - return true; - } - return false; - }); - - // limit max files - $files = new \LimitIterator($files, 0, $limit); - } - - return $files; + return self::getFilesByCallback($dir, $filterCallback, $limit); } + /** + * recursive file filter by size + * @param string $dir + * @param int $size + * @param int $limit + * @return \Traversable + */ + static function getFilesBySize(string $dir, int $size = 0, int $limit = self::DEFAULT_FILE_LIMIT) : \Traversable { + + $filterCallback = function($current, $key, $iterator) use ($size) { + /** + * @var $current \RecursiveDirectoryIterator + */ + if ( + !$current->isFile() || // allow recursion + ( + strpos($current->getFilename(), '.') !== 0 && // skip e.g. ".gitignore" + $current->getSize() > $size // filter file size + ) + ){ + return true; + } + return false; + }; + + return self::getFilesByCallback($dir, $filterCallback, $limit); + } + + /** + * @param string $dir + * @param \Closure $filterCallback + * @param int $limit + * @return \Traversable + */ + private static function getFilesByCallback(string $dir, \Closure $filterCallback, int $limit = self::DEFAULT_FILE_LIMIT) : \Traversable { + $files = new \ArrayIterator(); + if(is_dir($dir)){ + $directory = new \RecursiveDirectoryIterator( $dir, \FilesystemIterator::SKIP_DOTS ); + $files = new \RecursiveCallbackFilterIterator($directory, $filterCallback); + } + return new \LimitIterator($files, 0, $limit); + } } \ No newline at end of file diff --git a/app/main/data/mapper/sortingiterator.php b/app/main/data/mapper/sortingiterator.php new file mode 100644 index 00000000..875bc4dc --- /dev/null +++ b/app/main/data/mapper/sortingiterator.php @@ -0,0 +1,20 @@ +uasort($callback); + } +} \ No newline at end of file diff --git a/app/main/lib/logging/AbstractLog.php b/app/main/lib/logging/AbstractLog.php index 645d771c..27f0edc4 100644 --- a/app/main/lib/logging/AbstractLog.php +++ 
b/app/main/lib/logging/AbstractLog.php @@ -420,6 +420,9 @@ abstract class AbstractLog implements LogInterface { $params = []; if( !empty($conf = $this->handlerParamsConfig['stream']) ){ $params[] = $conf->stream; + $params[] = Logger::toMonologLevel($this->getLevel()); // min level that is handled; + $params[] = true; // bubble + $params[] = 0666; // permissions (default 644) } return $params; diff --git a/app/main/model/mapmodel.php b/app/main/model/mapmodel.php index c229c9aa..7ee50c67 100644 --- a/app/main/model/mapmodel.php +++ b/app/main/model/mapmodel.php @@ -1037,6 +1037,10 @@ class MapModel extends AbstractMapTrackingModel { ]; } + /** + * map log formatter callback + * @return \Closure + */ protected function getLogFormatter(){ return function(&$rowDataObj){ unset($rowDataObj['extra']); @@ -1258,9 +1262,18 @@ class MapModel extends AbstractMapTrackingModel { * @param int $limit * @return array */ - public function getLogData(int $offset = FileHandler::LOG_FILE_OFFSET, int $limit = FileHandler::LOG_FILE_LIMIT): array { + public function getLogData(int $offset = FileHandler::LOG_FILE_OFFSET, int $limit = FileHandler::LOG_FILE_LIMIT) : array { $streamConf = $this->getStreamConfig(); - return FileHandler::readLogFile($streamConf->stream, $offset, $limit, $this->getLogFormatter()); + + $rowFormatter = $this->getLogFormatter(); + $rowParser = function(string &$rowData, array &$data) use ($rowFormatter){ + if( !empty($rowDataObj = (array)json_decode($rowData, true)) ){ + $rowFormatter($rowDataObj); + $data[] = $rowDataObj; + } + }; + + return FileHandler::instance()->readFileReverse($streamConf->stream, $offset, $limit, $rowParser); } /** diff --git a/app/pathfinder.ini b/app/pathfinder.ini index 77d7fa6a..85b1f1b9 100644 --- a/app/pathfinder.ini +++ b/app/pathfinder.ini @@ -353,6 +353,16 @@ CACHE = 5 ; Default: history/ LOG = history/ +; Max file size for 'history' logs before getting truncated by cronjob +; Syntax: Integer (MB) +; Default: 2 +LOG_SIZE_THRESHOLD 
= 2 + +; Number of log entries (lines) to keep after the file gets truncated by cronjob +; Syntax: Integer +; Default: 1000 +LOG_LINES = 1000 + ; ADMIN =========================================================================================== ; "SUPER" admins and additional "CORPORATION" admins can be added here ;[PATHFINDER.ROLES]