add backup functions
This commit is contained in:
parent
3523f26c63
commit
62337771a2
86
src/backup.js
Normal file
86
src/backup.js
Normal file
|
@ -0,0 +1,86 @@
|
|||
/*
|
||||
* Creates regular backups of the canvas in png tiles
|
||||
* In order to run huge redis operations, you have to allow redis to use
|
||||
* more virtual memory, with:
|
||||
* vm.overcommit_memory = 1 in /etc/sysctl.conf and `sysctl vm.overcommit_memory=1`
|
||||
* also:
|
||||
* echo never > /sys/kernel/mm/transparent_hugepage/enabled
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
/* eslint-disable no-console */
|
||||
|
||||
import fs from 'fs';
|
||||
import redis from 'redis';
|
||||
import bluebird from 'bluebird';
|
||||
|
||||
/*
|
||||
* use low cpu priority
|
||||
*/
|
||||
import process from 'process';
|
||||
import { spawn } from 'child_process';
|
||||
const priority = 15;
|
||||
const proc= spawn("renice", [priority, process.pid]);
|
||||
proc.on('exit', function (code) {
|
||||
if (code !== 0){
|
||||
console.log("renice failed with code - " +code);
|
||||
}
|
||||
console.log('Useing low cpu priority');
|
||||
});
|
||||
// -------------------
|
||||
|
||||
|
||||
bluebird.promisifyAll(redis.RedisClient.prototype);
|
||||
bluebird.promisifyAll(redis.Multi.prototype);
|
||||
|
||||
import canvases from './canvases.json';
|
||||
import {
|
||||
updateBackupRedis,
|
||||
createPngBackup,
|
||||
incrementialBackupRedis,
|
||||
} from './core/tilesBackup';
|
||||
|
||||
const {
|
||||
CANVAS_REDIS_URL,
|
||||
BACKUP_REDIS_URL,
|
||||
BACKUP_DIR,
|
||||
} = process.env;
|
||||
if (!CANVAS_REDIS_URL || !BACKUP_REDIS_URL || !BACKUP_DIR) {
|
||||
throw new Error(
|
||||
'You did not set CANVAS_REDIS_URL, BACKUP_REDIS_URL or BACKUP_DIR',
|
||||
);
|
||||
}
|
||||
|
||||
const canvasRedis = redis
|
||||
.createClient(CANVAS_REDIS_URL, { return_buffers: true });
|
||||
const backupRedis = redis
|
||||
.createClient(BACKUP_REDIS_URL, { return_buffers: true });
|
||||
canvasRedis.on('error', () => {
|
||||
throw new Error('Could not connect to canvas redis');
|
||||
});
|
||||
backupRedis.on('error', () => {
|
||||
throw new Error('Could not connect to backup redis');
|
||||
});
|
||||
|
||||
|
||||
/*
 * Runs one daily backup cycle:
 * flushes the backup redis, copies all chunks over from the canvas redis,
 * renders them as png tiles into a per-day folder and creates an
 * incremential backup on top.
 */
function dailyBackup() {
  if (!fs.existsSync(BACKUP_DIR)) {
    // must reference BACKUP_DIR here - backupDir is only declared inside
    // the callback below, so using it here threw a ReferenceError instead
    // of the intended message
    throw new Error(`Backup directory ${BACKUP_DIR} does not exist!`);
  }

  // wipe the backup redis in the background, then rebuild it
  backupRedis.flushall('ASYNC', async () => {
    const date = new Date();
    // NOTE(review): month/day are not zero-padded, so e.g. Jan 11 and
    // Nov 1 of the same year both map to "2021111" - consider padding,
    // but that would change existing backup folder names
    const dayDir = `${date.getFullYear()}${date.getMonth() + 1}${date.getDate()}`;
    const backupDir = `${BACKUP_DIR}/${dayDir}`;
    if (!fs.existsSync(backupDir)) {
      fs.mkdirSync(backupDir);
    }
    await updateBackupRedis(canvasRedis, backupRedis, canvases);
    await createPngBackup(backupRedis, canvases, backupDir);
    await incrementialBackupRedis(canvasRedis, backupRedis, canvases, backupDir);
    console.log(`Daily backup ${dayDir} done`);
  });
}

dailyBackup();
|
|
@ -12,7 +12,6 @@ import type { Palette } from './Palette';
|
|||
import logger from './logger';
|
||||
import { getMaxTiledZoom } from './utils';
|
||||
import { TILE_SIZE, TILE_ZOOM_LEVEL } from './constants';
|
||||
import RedisCanvas from '../data/models/RedisCanvas';
|
||||
|
||||
/*
|
||||
* Deletes a subtile from a tile (paints it in color 0), if we wouldn't do it, it would be black
|
||||
|
@ -110,6 +109,7 @@ function tileFileName(canvasTileFolder: string, cell: Cell): string {
|
|||
|
||||
/*
|
||||
* @param canvasSize dimension of the canvas (pixels width/height)
|
||||
* @param redisCanvas Redis Canvas object
|
||||
* @param canvasId id of the canvas
|
||||
* @param canvasTileFolder root folder where to save tiles
|
||||
* @param palette Palette to use
|
||||
|
@ -117,6 +117,7 @@ function tileFileName(canvasTileFolder: string, cell: Cell): string {
|
|||
* @return true if successfully created tile, false if tile empty
|
||||
*/
|
||||
export async function createZoomTileFromChunk(
|
||||
redisCanvas: Object,
|
||||
canvasSize: number,
|
||||
canvasId: number,
|
||||
canvasTileFolder: string,
|
||||
|
@ -135,7 +136,7 @@ export async function createZoomTileFromChunk(
|
|||
let chunk = null;
|
||||
for (let dy = 0; dy < TILE_ZOOM_LEVEL; dy += 1) {
|
||||
for (let dx = 0; dx < TILE_ZOOM_LEVEL; dx += 1) {
|
||||
chunk = await RedisCanvas.getChunk(xabs + dx, yabs + dy, canvasId);
|
||||
chunk = await redisCanvas.getChunk(xabs + dx, yabs + dy, canvasId);
|
||||
if (!chunk) {
|
||||
na.push([dx, dy]);
|
||||
continue;
|
||||
|
@ -264,6 +265,7 @@ export async function createEmptyTile(
|
|||
|
||||
/*
|
||||
* created 4096x4096 texture of default canvas
|
||||
* @param redisCanvas Redis Canvas object
|
||||
* @param canvasId numberical Id of canvas
|
||||
* @param canvasSize size of canvas
|
||||
* @param canvasTileFolder root folder where to save texture
|
||||
|
@ -271,6 +273,7 @@ export async function createEmptyTile(
|
|||
*
|
||||
*/
|
||||
export async function createTexture(
|
||||
redisCanvas: Object,
|
||||
canvasId: number,
|
||||
canvasSize: numbr,
|
||||
canvasTileFolder,
|
||||
|
@ -300,7 +303,7 @@ export async function createTexture(
|
|||
} else {
|
||||
for (let dy = 0; dy < amount; dy += 1) {
|
||||
for (let dx = 0; dx < amount; dx += 1) {
|
||||
chunk = await RedisCanvas.getChunk(dx, dy, canvasId);
|
||||
chunk = await redisCanvas.getChunk(dx, dy, canvasId);
|
||||
if (!chunk) {
|
||||
na.push([dx, dy]);
|
||||
continue;
|
||||
|
@ -338,6 +341,7 @@ export async function createTexture(
|
|||
|
||||
/*
|
||||
* Create all tiles
|
||||
* @param redisCanvas Redis Canvas object
|
||||
* @param canvasSize dimension of the canvas (pixels width/height)
|
||||
* @param canvasId id of the canvas
|
||||
* @param canvasTileFolder root foler where to save tiles
|
||||
|
@ -345,6 +349,7 @@ export async function createTexture(
|
|||
* @param force overwrite existing tiles
|
||||
*/
|
||||
export async function initializeTiles(
|
||||
redisCanvas: Object,
|
||||
canvasSize: number,
|
||||
canvasId: number,
|
||||
canvasTileFolder: string,
|
||||
|
@ -373,6 +378,7 @@ export async function initializeTiles(
|
|||
const filename = `${canvasTileFolder}/${zoom}/${cx}/${cy}.png`;
|
||||
if (force || !fs.existsSync(filename)) {
|
||||
const ret = await createZoomTileFromChunk(
|
||||
redisCanvas,
|
||||
canvasSize,
|
||||
canvasId,
|
||||
canvasTileFolder,
|
||||
|
@ -416,7 +422,13 @@ export async function initializeTiles(
|
|||
);
|
||||
}
|
||||
// create snapshot texture
|
||||
await createTexture(canvasId, canvasSize, canvasTileFolder, palette);
|
||||
await createTexture(
|
||||
redisCanvas,
|
||||
canvasId,
|
||||
canvasSize,
|
||||
canvasTileFolder,
|
||||
palette,
|
||||
);
|
||||
//--
|
||||
logger.info(
|
||||
`Tiling: Elapsed Time: ${Math.round((Date.now() - startTime) / 1000)} for canvas${canvasId}`,
|
||||
|
|
212
src/core/tilesBackup.js
Normal file
212
src/core/tilesBackup.js
Normal file
|
@ -0,0 +1,212 @@
|
|||
/*
|
||||
* Offer functions for Canvas backups
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
/* eslint-disable no-console */
|
||||
|
||||
import sharp from 'sharp';
|
||||
import fs from 'fs';
|
||||
import Palette from './Palette';
|
||||
|
||||
import { TILE_SIZE } from './constants';
|
||||
|
||||
|
||||
/*
|
||||
* Copy canvases from one redis instance to another
|
||||
* @param canvasRedis redis from where to get the data
|
||||
* @param backupRedis redis where to write the data to
|
||||
* @param canvases Object with all canvas informations
|
||||
*/
|
||||
/*
 * Copy canvases from one redis instance to another
 * @param canvasRedis redis from where to get the data
 * @param backupRedis redis where to write the data to
 * @param canvases Object with all canvas informations
 */
export async function updateBackupRedis(canvasRedis, backupRedis, canvases) {
  for (const id of Object.keys(canvases)) {
    const canvas = canvases[id];
    // canvases are square; chunks per row/column
    const chunksXY = canvas.size / TILE_SIZE;
    console.log('Copy Chunks to backup redis...');
    const startTime = Date.now();
    let amount = 0;
    for (let cx = 0; cx < chunksXY; cx += 1) {
      for (let cy = 0; cy < chunksXY; cy += 1) {
        const key = `ch:${id}:${cx}:${cy}`;
        /*
         * await on every iteration is fine because less resource usage
         * in exchange for higher execution time is wanted.
         */
        // eslint-disable-next-line no-await-in-loop
        const chunk = await canvasRedis.getAsync(key);
        if (chunk) {
          // eslint-disable-next-line no-await-in-loop
          await backupRedis.sendCommandAsync('SET', [key, chunk]);
          amount += 1;
        }
      }
    }
    const time = Date.now() - startTime;
    console.log(`Finished Copying ${amount} chunks in ${time}ms.`);
  }
}
|
||||
|
||||
|
||||
/*
|
||||
* Create incremential PNG tile backup between two redis canvases
|
||||
* @param canvasRedis redis from where to get the data
|
||||
* @param backupRedis redis where to write the data to
|
||||
* @param canvases Object with all canvas informations
|
||||
*/
|
||||
export async function incrementialBackupRedis(
|
||||
canvasRedis,
|
||||
backupRedis,
|
||||
canvases,
|
||||
backupDir: string,
|
||||
) {
|
||||
const ids = Object.keys(canvases);
|
||||
for (let i = 0; i < ids.length; i += 1) {
|
||||
const id = ids[i];
|
||||
|
||||
|
||||
const canvasBackupDir = `${backupDir}/${id}`;
|
||||
if (!fs.existsSync(canvasBackupDir)) {
|
||||
fs.mkdirSync(canvasBackupDir);
|
||||
}
|
||||
const hourOfDay = new Date().getHours();
|
||||
const canvasTileBackupDir = `${canvasBackupDir}/${hourOfDay}`;
|
||||
if (!fs.existsSync(canvasTileBackupDir)) {
|
||||
fs.mkdirSync(canvasTileBackupDir);
|
||||
}
|
||||
|
||||
const canvas = canvases[id];
|
||||
const palette = new Palette(canvas.colors, canvas.alpha);
|
||||
const chunksXY = (canvas.size / TILE_SIZE);
|
||||
console.log('Creating Incremential Backup...');
|
||||
const startTime = Date.now();
|
||||
let amount = 0;
|
||||
for (let x = 0; x < chunksXY; x++) {
|
||||
const xBackupDir = `${canvasTileBackupDir}/${x}`;
|
||||
let createdDir = false;
|
||||
|
||||
for (let y = 0; y < chunksXY; y++) {
|
||||
const key = `ch:${id}:${x}:${y}`;
|
||||
/*
|
||||
* await on every iteration is fine because less resource usage
|
||||
* in exchange for higher execution time is wanted.
|
||||
*/
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const curChunk = await canvasRedis.getAsync(key);
|
||||
let tileBuffer = null;
|
||||
if (curChunk) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const oldChunk = await backupRedis.getAsync(key);
|
||||
if (oldChunk) {
|
||||
let pxl = 0;
|
||||
while (pxl < curChunk.length) {
|
||||
if (curChunk[pxl] !== oldChunk[pxl]) {
|
||||
if (!tileBuffer) {
|
||||
tileBuffer = new Uint32Array(TILE_SIZE * TILE_SIZE);
|
||||
}
|
||||
const color = palette.abgr[curChunk[pxl]];
|
||||
tileBuffer[pxl] = color;
|
||||
}
|
||||
pxl += 1;
|
||||
}
|
||||
} else {
|
||||
tileBuffer = curChunk;
|
||||
}
|
||||
}
|
||||
if (tileBuffer) {
|
||||
if (!createdDir && !fs.existsSync(xBackupDir)) {
|
||||
createdDir = true;
|
||||
fs.mkdirSync(xBackupDir);
|
||||
}
|
||||
const filename = `${xBackupDir}/${y}.png`;
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await sharp(
|
||||
Buffer.from(tileBuffer.buffer), {
|
||||
raw: {
|
||||
width: TILE_SIZE,
|
||||
height: TILE_SIZE,
|
||||
channels: 4,
|
||||
},
|
||||
},
|
||||
).toFile(filename);
|
||||
amount += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
const time = Date.now() - startTime;
|
||||
console.log(
|
||||
`Finished Incremential backup of ${amount} chunks in ${time}ms.`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* Backup all tiles as PNG files into folder
|
||||
* @param redisClient RedisClient
|
||||
* @param canvases Object with the informations to all canvases
|
||||
* @param backupDir directory where to save png tiles
|
||||
*/
|
||||
export async function createPngBackup(
|
||||
redisClient: Object,
|
||||
canvases: Object,
|
||||
backupDir: string,
|
||||
) {
|
||||
const ids = Object.keys(canvases);
|
||||
for (let i = 0; i < ids.length; i += 1) {
|
||||
const id = ids[i];
|
||||
|
||||
const canvasBackupDir = `${backupDir}/${id}`;
|
||||
if (!fs.existsSync(canvasBackupDir)) {
|
||||
fs.mkdirSync(canvasBackupDir);
|
||||
}
|
||||
const canvasTileBackupDir = `${canvasBackupDir}/tiles`;
|
||||
if (!fs.existsSync(canvasTileBackupDir)) {
|
||||
fs.mkdirSync(canvasTileBackupDir);
|
||||
}
|
||||
|
||||
const canvas = canvases[id];
|
||||
const palette = new Palette(canvas.colors, canvas.alpha);
|
||||
const chunksXY = (canvas.size / TILE_SIZE);
|
||||
console.log('Create PNG tiles from backup...');
|
||||
const startTime = Date.now();
|
||||
let amount = 0;
|
||||
for (let x = 0; x < chunksXY; x++) {
|
||||
const xBackupDir = `${canvasTileBackupDir}/${x}`;
|
||||
if (!fs.existsSync(xBackupDir)) {
|
||||
fs.mkdirSync(xBackupDir);
|
||||
}
|
||||
for (let y = 0; y < chunksXY; y++) {
|
||||
const key = `ch:${id}:${x}:${y}`;
|
||||
/*
|
||||
* await on every iteration is fine because less resource usage
|
||||
* in exchange for higher execution time is wanted.
|
||||
*/
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const chunk = await redisClient.getAsync(key);
|
||||
if (chunk) {
|
||||
const textureBuffer = palette.buffer2RGB(chunk);
|
||||
const filename = `${xBackupDir}/${y}.png`;
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await sharp(
|
||||
Buffer.from(textureBuffer.buffer), {
|
||||
raw: {
|
||||
width: TILE_SIZE,
|
||||
height: TILE_SIZE,
|
||||
channels: 3,
|
||||
},
|
||||
},
|
||||
).toFile(filename);
|
||||
amount += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
const time = Date.now() - startTime;
|
||||
console.log(
|
||||
`Finished creating PNG backup of ${amount} chunks in ${time}ms.`,
|
||||
);
|
||||
}
|
||||
}
|
|
@ -10,6 +10,7 @@ import type { Cell } from './Cell';
|
|||
import logger from './logger';
|
||||
import canvases from '../canvases.json';
|
||||
import Palette from './Palette';
|
||||
import RedisCanvas from '../data/models/RedisCanvas';
|
||||
|
||||
import { TILE_FOLDER } from './config';
|
||||
import {
|
||||
|
@ -64,6 +65,7 @@ class CanvasUpdater {
|
|||
|
||||
if (zoom === this.maxTiledZoom - 1) {
|
||||
await createZoomTileFromChunk(
|
||||
RedisCanvas,
|
||||
this.canvas.size,
|
||||
this.id,
|
||||
this.canvasTileFolder,
|
||||
|
@ -79,7 +81,13 @@ class CanvasUpdater {
|
|||
}
|
||||
|
||||
if (zoom === 0) {
|
||||
createTexture(this.id, this.canvas.size, this.canvasTileFolder, this.palette);
|
||||
createTexture(
|
||||
RedisCanvas,
|
||||
this.id,
|
||||
this.canvas.size,
|
||||
this.canvasTileFolder,
|
||||
this.palette,
|
||||
);
|
||||
} else {
|
||||
const [ucx, ucy] = [cx, cy].map((z) => Math.floor(z / 4));
|
||||
const upperTile = ucx + ucy * (TILE_ZOOM_LEVEL ** (zoom - 1));
|
||||
|
@ -103,7 +111,9 @@ class CanvasUpdater {
|
|||
const chunkOffset = cx + cy * this.firstZoomtileWidth;
|
||||
if (~queue.indexOf(chunkOffset)) return;
|
||||
queue.push(chunkOffset);
|
||||
logger.info(`Tiling: Enqueued ${cx}, ${cy} / ${this.id} for basezoom reload`);
|
||||
logger.info(
|
||||
`Tiling: Enqueued ${cx}, ${cy} / ${this.id} for basezoom reload`,
|
||||
);
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -128,6 +138,7 @@ class CanvasUpdater {
|
|||
'Tiling: tiledir empty, will initialize it, this can take some time',
|
||||
);
|
||||
await initializeTiles(
|
||||
RedisCanvas,
|
||||
this.canvas.size,
|
||||
this.id,
|
||||
this.canvasTileFolder,
|
||||
|
@ -154,6 +165,7 @@ class CanvasUpdater {
|
|||
export function registerChunkChange(canvasId: number, chunk: Cell) {
|
||||
return CanvasUpdaters[canvasId].registerChunkChange(chunk);
|
||||
}
|
||||
RedisCanvas.setChunkChangeCallback(registerChunkChange);
|
||||
|
||||
export function registerPixelChange(canvasId: number, pixel: Cell) {
|
||||
return CanvasUpdaters[canvasId].registerPixelChange(pixel);
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
/* @flow */
|
||||
|
||||
import { getChunkOfPixel, getOffsetOfPixel } from '../../core/utils';
|
||||
import { registerChunkChange } from '../../core/tileserver';
|
||||
import { TILE_SIZE } from '../../core/constants';
|
||||
import canvases from '../../canvases.json';
|
||||
import logger from '../../core/logger';
|
||||
|
@ -19,7 +18,14 @@ const chunks: Set<string> = new Set();
|
|||
|
||||
|
||||
class RedisCanvas {
|
||||
// callback that gets informed about chunk changes
|
||||
static registerChunkChange = () => undefined;
|
||||
static setChunkChangeCallback(cb) {
|
||||
RedisCanvas.registerChunkChange = cb;
|
||||
}
|
||||
|
||||
static getChunk(i: number, j: number, canvasId: number): Promise<Buffer> {
|
||||
// this key is also hardcoded into core/tilesBackup.js
|
||||
return redis.getAsync(`ch:${canvasId}:${i}:${j}`);
|
||||
}
|
||||
|
||||
|
@ -31,7 +37,7 @@ class RedisCanvas {
|
|||
}
|
||||
const key = `ch:${canvasId}:${i}:${j}`;
|
||||
await redis.setAsync(key, Buffer.from(chunk.buffer));
|
||||
registerChunkChange(canvasId, [i, j]);
|
||||
RedisCanvas.registerChunkChange(canvasId, [i, j]);
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -63,7 +69,7 @@ class RedisCanvas {
|
|||
|
||||
const args = [key, 'SET', UINT_SIZE, `#${offset}`, color];
|
||||
await redis.sendCommandAsync('bitfield', args);
|
||||
registerChunkChange(canvasId, [i, j]);
|
||||
RedisCanvas.registerChunkChange(canvasId, [i, j]);
|
||||
}
|
||||
|
||||
static async getPixel(
|
||||
|
|
|
@ -30,33 +30,4 @@ async function copyChunks() {
|
|||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Creating new basechunk if the sizes are the same, just the colors chaned
|
||||
* @param x x coordinates of chunk (in chunk coordinates, not pixel coordinates)
|
||||
* @param y y coordinates of chunk (in chunk coordinates, not pixel coordinates)
|
||||
*/
|
||||
async function createBasechunk(x: number, y: number): Uint8Array {
|
||||
const key = `chunk:${x}:${y}`;
|
||||
const newChunk = new Uint8Array(TILE_SIZE * TILE_SIZE);
|
||||
|
||||
const smallchunk = await oldredis.getAsync(key);
|
||||
if (!smallchunk) {
|
||||
return
|
||||
}
|
||||
|
||||
const oldChunk = new Uint8Array(smallchunk);
|
||||
if (oldChunk.length != newChunk.length || oldChunk.length != TILE_SIZE * TILE_SIZE) {
|
||||
console.log(`ERROR: Chunk length ${oldChunk.length} of chunk ${x},${y} not of correct size!`);
|
||||
}
|
||||
|
||||
for (let px = 0; px < oldChunk.length; px += 1) {
|
||||
newChunk[px] = colorConvert(oldChunk[px]);
|
||||
}
|
||||
|
||||
const setNXArgs = [key, Buffer.from(newChunk.buffer).toString('binary')]
|
||||
await newredis.sendCommandAsync('SETNX', setNXArgs);
|
||||
console.log("Created Chunk ", key);
|
||||
}
|
||||
|
||||
|
||||
copyChunks();
|
||||
|
|
Loading…
Reference in New Issue
Block a user