diff --git a/src/backup.js b/src/backup.js
index bfda54f3..755c6163 100644
--- a/src/backup.js
+++ b/src/backup.js
@@ -110,7 +110,7 @@ function getDateFolder() {
   let day = date.getDate();
   if (month < 10) month = `0${month}`;
   if (day < 10) day = `0${day}`;
-  const dayDir = `${date.getFullYear()}${month}${day}`;
+  const dayDir = `${date.getFullYear()}/${month}/${day}`;
   const backupDir = `${dir}/${dayDir}`;
   return backupDir;
 }
@@ -118,13 +118,11 @@ function getDateFolder() {
 async function dailyBackup() {
   const backupDir = getDateFolder();
   if (!fs.existsSync(backupDir)) {
-    fs.mkdirSync(backupDir);
+    fs.mkdirSync(backupDir, { recursive: true });
   }
 
   await backupRedis.flushallAsync('ASYNC');
-  if (!fs.existsSync(backupDir)) {
-    fs.mkdirSync(backupDir);
-  }
+
   try {
     await updateBackupRedis(canvasRedis, backupRedis, canvases);
     await createPngBackup(backupRedis, canvases, backupDir);
@@ -137,7 +135,7 @@ async function dailyBackup() {
 async function incrementialBackup() {
   const backupDir = getDateFolder();
   if (!fs.existsSync(backupDir)) {
-    fs.mkdirSync(backupDir);
+    fs.mkdirSync(backupDir, { recursive: true });
   }
   try {
     await incrementialBackupRedis(
diff --git a/src/routes/api/history.js b/src/routes/api/history.js
index 6f48d51e..9d423adc 100644
--- a/src/routes/api/history.js
+++ b/src/routes/api/history.js
@@ -10,12 +10,15 @@ import { BACKUP_DIR } from '../../core/config';
 
 async function history(req: Request, res: Response) {
   const { day, id } = req.query;
-  if (!BACKUP_DIR || !day || !id || day.includes('/') || day.includes('\\')) {
+  if (!BACKUP_DIR || !day || !id
+    || day.includes('/') || day.includes('\\') || day.length !== 8
+  ) {
     res.status(404).end();
   }
   const yyyy = day.slice(0, 4);
-  const mmdd = day.slice(4);
-  const path = `${BACKUP_DIR}/${yyyy}/${mmdd}/${id}`;
+  const mm = day.slice(4, 6);
+  const dd = day.slice(6);
+  const path = `${BACKUP_DIR}/${yyyy}/${mm}/${dd}/${id}`;
 
   try {
     if (!fs.existsSync(path)) {
diff --git a/src/ui/ChunkLoader2D.js b/src/ui/ChunkLoader2D.js
index ec884d57..93a20e41 100644
--- a/src/ui/ChunkLoader2D.js
+++ b/src/ui/ChunkLoader2D.js
@@ -255,7 +255,7 @@ class ChunkLoader {
 
     const center = [historicalCanvasMaxTiledZoom, cx, cy];
     // eslint-disable-next-line max-len
-    let url = `${window.ssv.backupurl}/${historicalDate.slice(0, 4)}/${historicalDate.slice(4)}}/`;
+    let url = `${window.ssv.backupurl}/${historicalDate.slice(0, 4)}/${historicalDate.slice(4, 6)}/${historicalDate.slice(6)}/`;
     if (historicalTime) {
       // incremential tiles
       url += `${canvasId}/${historicalTime}/${cx}/${cy}.png`;
diff --git a/utils/README.md b/utils/README.md
index f0d8a31d..4267b9eb 100644
--- a/utils/README.md
+++ b/utils/README.md
@@ -51,7 +51,7 @@ same as historyDownload, just that its designed for running on the storage serve
 ## backupSync.sh
 shell script that can be launched with backup.js to sync to a storage server after every backup. It uses rsync which is much faster than ftp, sftp or any other methode.
 ### Note:
-Backups aka historical view does use lots of files and might eventually hit the inode limit of your file system, consider to use mksquashfs to compress past backups into one image
+Backups (aka historical view) use lots of files and might eventually hit the inode limit of your file system. Consider using mksquashfs to compress past backups into one read-only image and mounting it.
 
 ## liveLog.sh
 shell script that watches the pixel.log file and outputs the stats of the current IPs placing there
diff --git a/utils/backupSync.sh b/utils/backupSync.sh
index 61099895..6b30c711 100644
--- a/utils/backupSync.sh
+++ b/utils/backupSync.sh
@@ -1,17 +1,18 @@
-#!/bin/sh
+#!/bin/bash
 TMPDIR="/tmp/backup"
 
+DATE_TODAY=`printf "%(%Y/%m/%d)T" -1`
+DATE_YESTERDAY=`printf "%(%Y/%m/%d)T" $(( $(printf "%(%s)T" -1) - 24*3600 ))`
+
 #delete older daily backup folders from local filesystem if exist
-cd ${TMPDIR}
-if [ "`ls -t | wc -l`" -gt "1" ]
+if [ -d "${TMPDIR}/${DATE_YESTERDAY}" ]
 then
-  ls -t | tail -n +2 | xargs rm -rf --
+  rm -rf "${TMPDIR}/${DATE_YESTERDAY}"
 fi
-cd - > /dev/null
 
 rsync -r ${TMPDIR}/ backup@ayylmao:/backup/pixelplanet/canvas
 
 #clear current daily folder
 #we do NOT delete the daily folder itself, because the backup script would create
 #a new full backup if its missing
-rm -rf ${TMPDIR}/*/*
+rm -rf ${TMPDIR}/${DATE_TODAY}/*
diff --git a/utils/historyCopy.py b/utils/historyCopy.py
deleted file mode 100755
index 4b17529c..00000000
--- a/utils/historyCopy.py
+++ /dev/null
@@ -1,109 +0,0 @@
-#!/usr/bin/python3
-
-import PIL.Image
-import sys, io, os
-import datetime
-import json
-import threading
-
-# minus half the canvas size
-offset = int(-256 * 256 / 2)
-
-class GetDay(threading.Thread):
-    def __init__(self, x, y, w, h, iter_date, cnt):
-        threading.Thread.__init__(self)
-        self.x = x
-        self.y = y
-        self.w = w
-        self.h = h
-        self.iter_date = iter_date
-        self.cnt = cnt * 1000
-        self.daemon = True
-    def run(self):
-        iter_date = self.iter_date
-        x = self.x
-        y = self.y
-        w = self.w
-        h = self.h
-        cnt = self.cnt
-        xc = (x - offset) // 256
-        wc = (x + w - offset) // 256
-        yc = (y - offset) // 256
-        hc = (y + h - offset) // 256
-        print('------------------------------------------------')
-        print('Getting frames for date %s' % (iter_date))
-        image = PIL.Image.new('RGBA', (w, h))
-        for iy in range(yc, hc + 1):
-            for ix in range(xc, wc + 1):
-                path = './canvas/%s/0/tiles/%s/%s.png' % (iter_date, ix, iy)
-                offx = ix * 256 + offset - x
-                offy = iy * 256 + offset - y
-                img = PIL.Image.open(path).convert('RGBA')
-                image.paste(img, (offx, offy), img)
-                img.close()
-        print('Got start of day')
-        cnt += 1
-        image.save('./timelapse/t%06d.png' % (cnt))
-        time_list = os.listdir('./canvas/%s/%s' % (iter_date, 0))
-        for time in time_list:
-            if time == 'tiles':
-                continue
-            for iy in range(yc, hc + 1):
-                for ix in range(xc, wc + 1):
-                    path = './canvas/%s/0/%s/%s/%s.png' % (iter_date, time, ix, iy)
-                    if not os.path.exists(path):
-                        continue
-                    offx = ix * 256 + offset - x
-                    offy = iy * 256 + offset - y
-                    img = PIL.Image.open(path).convert('RGBA')
-                    image.paste(img, (offx, offy), img)
-                    img.close()
-            print('Got time %s' % (time))
-            cnt += 1
-            image.save('./timelapse/t%06d.png' % (cnt))
-        image.close()
-
-def get_area(x, y, w, h, start_date, end_date):
-    delta = datetime.timedelta(days=1)
-    end_date = end_date.strftime("%Y%m%d")
-    iter_date = None
-    cnt = 0
-    threads = []
-    while iter_date != end_date:
-        iter_date = start_date.strftime("%Y%m%d")
-        start_date = start_date + delta
-        thread = GetDay(x, y, w, h, iter_date, cnt)
-        thread.start()
-        threads.append(thread)
-        cnt += 1
-    for t in threads:
-        t.join()
-
-
-if __name__ == "__main__":
-    if len(sys.argv) != 4 and len(sys.argv) != 5:
-        print("Download history of an area of pixelplanet - useful for timelapses")
-        print("Usage: historyDownload.py startX_startY endX_endY amount_days")
-        print("→start_date and end_date are in YYYY-MM-dd formate")
-        print("→user R key on pixelplanet to copy coordinates)")
-        print("→images will be saved into timelapse folder)")
-        print("-----------")
-        print("You can create a timelapse from the resulting files with ffmpeg like that:")
-        print("ffmpeg -framerate 15 -f image2 -i timelapse/t%06d.png -c:v libx264 -pix_fmt yuva420p output.mp4")
-    else:
-        start = sys.argv[1].split('_')
-        end = sys.argv[2].split('_')
-        amount_days = datetime.timedelta(days=int(sys.argv[3]));
-        end_date = datetime.date.today()
-        start_date = end_date - amount_days
-        x = int(start[0])
-        y = int(start[1])
-        w = int(end[0]) - x
-        h =int( end[1]) - y
-        if not os.path.exists('./timelapse'):
-            os.mkdir('./timelapse')
-        get_area(x, y, w, h, start_date, end_date)
-        print("Done!")
-        print("to create a timelapse from it:")
-        print("ffmpeg -framerate 15 -f image2 -i timelapse/t%06d.png -c:v libx264 -pix_fmt yuva420p output.mp4")
-
diff --git a/utils/historyDownload.py b/utils/historyDownload.py
index 8de01518..b4bb24ec 100755
--- a/utils/historyDownload.py
+++ b/utils/historyDownload.py
@@ -120,7 +120,7 @@ async def get_area(canvas, x, y, w, h, start_date, end_date):
     image = PIL.Image.new('RGBA', (w, h))
     for iy in range(yc, hc + 1):
         for ix in range(xc, wc + 1):
-            url = 'https://storage.pixelplanet.fun/%s/%s/%s/tiles/%s/%s.png' % (iter_date[0:4], iter_date[4:], canvas_id, ix, iy)
+            url = 'https://storage.pixelplanet.fun/%s/%s/%s/%s/tiles/%s/%s.png' % (iter_date[0:4], iter_date[4:6], iter_date[6:], canvas_id, ix, iy)
             offx = ix * 256 + offset - x
             offy = iy * 256 + offset - y
             tasks.append(fetch(session, url, offx, offy, image, bkg, True))
@@ -153,7 +153,7 @@ async def get_area(canvas, x, y, w, h, start_date, end_date):
     image_rel = image.copy()
     for iy in range(yc, hc + 1):
         for ix in range(xc, wc + 1):
-            url = 'https://storage.pixelplanet.fun/%s/%s/%s/%s/%s/%s.png' % (iter_date[0:4], iter_date[4:], canvas_id, time, ix, iy)
+            url = 'https://storage.pixelplanet.fun/%s/%s/%s/%s/%s/%s/%s.png' % (iter_date[0:4], iter_date[4:6], iter_date[6:], canvas_id, time, ix, iy)
             offx = ix * 256 + offset - x
             offy = iy * 256 + offset - y
             tasks.append(fetch(session, url, offx, offy, image_rel, bkg))
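For reference, the change above splits the former `YYYYMMDD` backup folder into a `YYYY/MM/DD` hierarchy, both on disk (`backup.js`, `backupSync.sh`) and in the storage URLs (`history.js`, `ChunkLoader2D.js`, `historyDownload.py`). Below is a minimal sketch of how a tile URL is assembled under the new layout: the base URL and the `canvas/tiles/x/y.png` segments are taken from the `historyDownload.py` lines changed above, while `build_tile_url` itself and the example date/canvas values are hypothetical, not part of the repository.

```python
#!/usr/bin/python3
# Sketch of the new date-partitioned storage path (YYYY/MM/DD) used by the
# patched history download. build_tile_url is an illustrative helper only.

def build_tile_url(day, canvas_id, ix, iy, time=None):
    """day is a YYYYMMDD string, e.g. '20200723'; time is an optional
    incremental-backup timestamp segment."""
    if len(day) != 8 or not day.isdigit():
        raise ValueError('day must be YYYYMMDD')
    yyyy, mm, dd = day[0:4], day[4:6], day[6:8]
    base = 'https://storage.pixelplanet.fun/%s/%s/%s/%s' % (yyyy, mm, dd, canvas_id)
    if time is None:
        # tiles of the full daily backup
        return '%s/tiles/%s/%s.png' % (base, ix, iy)
    # tiles of an incremental backup at the given time
    return '%s/%s/%s/%s.png' % (base, time, ix, iy)

if __name__ == '__main__':
    print(build_tile_url('20200723', 0, 3, 7))
```

Running the sketch prints `https://storage.pixelplanet.fun/2020/07/23/0/tiles/3/7.png`, which matches the URL pattern the patched `historyDownload.py` requests.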