clean up utils folder

This commit is contained in:
HF 2022-08-12 16:09:52 +02:00
parent 0a5bb9aca7
commit 4e74912655
9 changed files with 2 additions and 741 deletions

View File

@ -23,20 +23,8 @@ Used to generate tiles based on a uv texture that can then be drawn on the canva
## country-locations
Generates a JSON list of country codes and their coordinates on the canvas, based on latitude and longitude
## redis-convert.js
Script to convert a redis canvas database to a different color palette and/or chunk layout
## redis-copy.js
Script to copy a canvas from one redis to another, with different keys if necessary
## sql-commandtest.js
Script that connects to the mysql database and runs a few test queries, just for testing
## proxyConvert.sh
Converts a proxy list in a specific txt format into a more readable list
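It reads `ips-static.txt` (one `host:port:user:password` entry per line) and writes `http://user:password@host:port` lines to `proxies.txt`.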
## imageClean.py
python3 script that takes an input image and clears stray pixels and bot remains
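Usage: `imageClean.py image.png`; the result is written to `image-cleaned.png` next to the input.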
## redisMoveCanvas.js
Script to move canvas chunks, e.g. for resizing a canvas
## areaDownload.py
downloads an area of the canvas into a png file.
@ -48,18 +36,11 @@ downloads the history from a canvas area between two dates.
Usage: `historyDownload.py canvasId startX_startY endX_endY start_date end_date`
This is used for creating timelapses; see the command-line help for details.
## liveLog.sh
shell script that watches the pixel.log file and outputs stats on the IPs currently placing pixels there
Usage: `./liveLog.sh LOGFILE CANVASID STARTX_STARTY ENDX_ENDY`
## pp-center\*.png
center logo of pixelplanet
## change-canvasbackup
A one-off script that was run to add the missing tiles in the historical view when the moon canvas was enlarged.
## uploadImage.js
nodejs script to upload an image to the canvas without checks and without caring about what was previously there. Don't use it for anything other than initially loading a very large image onto the canvas.
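The image path, canvas id and target x/y coordinates are hard-coded in the `uploadImage(...)` call at the bottom of the script.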
## testStore.js
used to test our own [connect-redis](https://github.com/tj/connect-redis) fork in src/utils/connectRedis.js

View File

@ -1,51 +0,0 @@
#!/usr/bin/env python3
# this script filters out noise from an indexed image
import PIL.Image
import sys
def check_pixel(pix, x, y):
    # if the pixel is surrounded by just one color
    # and at most one other color
    cnt_clr1 = 0
    cnt_clr2 = 0
    clr1 = pix[x-1, y-1]
    clr2 = None
    for xrel in range(-1, 2):
        for yrel in range(-1, 2):
            if not xrel and not yrel:
                continue
            clr = pix[x + xrel, y + yrel]
            if clr == clr1:
                cnt_clr1 += 1
            elif clr2 is None:
                clr2 = clr
                cnt_clr2 += 1
            elif clr == clr2:
                cnt_clr2 += 1
            else:
                return None
    if cnt_clr1 > 1 and cnt_clr2 > 1:
        return None
    if cnt_clr1 > 1:
        return clr1
    return clr2


def clean_image(filename):
    im = PIL.Image.open(filename).convert('RGBA')
    width, height = im.size
    pix = im.load()
    im_new = PIL.Image.new('RGBA', (width, height), (255, 0, 0, 0))
    pix_new = im_new.load()
    for x in range(1, width - 1):
        for y in range(1, height - 1):
            target = check_pixel(pix, x, y)
            if target is not None and target != pix[x, y]:
                pix_new[x, y] = target
    im.close()
    im_new.save("%s-cleaned.png" % filename[:-4])
    im_new.close()


if __name__ == "__main__":
    filename = sys.argv[1]
    clean_image(filename)

View File

@ -1,75 +0,0 @@
#!/bin/bash
# this script parses the pixel logs live and shows which IPs are currently active in
# a given area and where they placed their last pixel
# Usage: ./liveLog.sh LOGFILE CANVASID STARTX_STARTY ENDX_ENDY
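# note: the parsing functions below assume whitespace-separated log lines and only use
# fields 1 (IP), 3 (canvas id), 4 (x), 5 (y) and 7 (color index); all other fields are ignored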
LOGFILE=$1
CANVAS=$2
STARTCOORDS=$3
ENDCOORDS=$4
STARTX=`echo ${STARTCOORDS} | sed 's/_.*$//'`
STARTY=`echo ${STARTCOORDS} | sed 's/^.*_//'`
ENDX=`echo ${ENDCOORDS} | sed 's/_.*$//'`
ENDY=`echo ${ENDCOORDS} | sed 's/^.*_//'`
if [ "$#" -ne 4 ]
then
echo " Usage: ./liveLog.sh LOGFILE CANVASID STARTX_STARTY ENDX_ENDY"
echo ""
echo "this script parses the pixellogs live and shows which IPs are currently active in "
echo "a given area and where they placed their last pixel"
exit 1
fi
parse_log()
{
while read -r -a args
do
CAN=${args[2]}
X=${args[3]}
Y=${args[4]}
if [ "$CAN" -eq "$CANVAS" -a "$X" -ge "$STARTX" -a "$X" -le "$ENDX" -a "$Y" -ge "$STARTY" -a "$Y" -le "$ENDY" ]
then
IP=${args[0]}
CLR=${args[6]}
printf "%-40s | %-18s | %5s\n" "$IP" "$X,$Y" "$CLR"
fi
done <&0
}
declare -A ACTIVEIPS
parse_log_active_ips()
{
while read -r -a args
do
CAN=${args[2]}
X=${args[3]}
Y=${args[4]}
if [ "$CAN" -eq "$CANVAS" -a "$X" -ge "$STARTX" -a "$X" -le "$ENDX" -a "$Y" -ge "$STARTY" -a "$Y" -le "$ENDY" ]
then
IP=${args[0]}
if [ -z "${ACTIVEIPS[$IP]}" ]
then
CNT=0
else
CNT=`echo ${ACTIVEIPS[$IP]} | sed 's/ .*//'`
fi
CNT=$((${CNT} + 1))
CLR=${args[6]}
ACTIVEIPS[$IP]="$CNT $IP $X,$Y $CLR"
print_active_ips | sort -rV
fi
done <&0
}
print_active_ips()
{
clear
for IP in "${!ACTIVEIPS[@]}"
do
printf "%-7s | %-40s | %-18s | %5s\n" ${ACTIVEIPS[$IP]}
done
}
tail -f ${LOGFILE} | parse_log_active_ips

View File

@ -1,13 +0,0 @@
#!/bin/bash
# Basic shell script to convert a text proxy list into a more readable list
echo "" > proxies.txt
for i in `cat ips-static.txt`; do
HOST=`echo $i | sed 's/\(.*\):.*:.*:.*/\1/'`
PORT=`echo $i | sed 's/.*:\(.*\):.*:.*/\1/'`
USER=`echo $i | sed 's/.*:.*:\(.*\):.*/\1/'`
IP=`echo $USER | sed 's/.*-\(.*\)/\1/'`
PASSWORD=`echo $i | sed 's/.*:.*:.*:\(.*\)/\1/'`
#COUNTRY=`geoiplookup $IP`
echo "http://$USER:$PASSWORD@$HOST:$PORT" >> proxies.txt
done

View File

@ -1,165 +0,0 @@
// this script converts the old 64x64 chunks that were organized relative to the center into 256x256 chunks with 0,0 being the top-left corner
// it also goes from 2 pixels per byte to 1 pixel per byte
// old colors are converted to the new order
import { TILE_SIZE, CANVAS_SIZE, CANVAS_MIN_XY, CANVAS_MAX_XY } from '../src/core/constants';
import redis from 'redis';
// ATTENTION: Make sure to set the redis URLs right!!!
const oldurl = "redis://localhost:6380";
const oldredis = redis.createClient({ url: oldurl, return_buffers: true });
const newurl = "redis://localhost:6379";
const newredis = redis.createClient({ url: newurl, return_buffers: true });
const CHUNK_SIZE = 64; //old chunk size
const CHUNKS_IN_BASETILE = TILE_SIZE / CHUNK_SIZE;
const CHUNK_MIN_XY = Math.floor(CANVAS_MIN_XY / CHUNK_SIZE);
const CHUNK_MAX_XY = Math.floor(CANVAS_MAX_XY / CHUNK_SIZE);
import { COLORS_ABGR } from '../src/core/Color';
//-----------------------------
// old colors
const OLD_COLORS_RGB: Uint8Array = new Uint8Array( [
202, 227, 255, //first color is unset pixel in ocean
255, 255, 255, //second color is unset pixel on land
255, 255, 255, //white
228, 228, 228, //light gray
136, 136, 136, //dark gray
78, 78, 78, //darker gray
0, 0, 0, //black
244, 179, 174, //light pink
255, 167, 209, //pink
255, 101, 101, //peach
229, 0, 0, //red
254, 164, 96, //light brown
229, 149, 0, //orange
160, 106, 66, //brown
245, 223, 176, //sand
229, 217, 0, //yellow
148, 224, 68, //light green
2, 190, 1, //green
0, 101, 19, //dark green
202, 227, 255, //sky blue
0, 211, 221, //light blue
0, 131, 199, //dark blue
0, 0, 234, //blue
25, 25, 115, //darker blue
207, 110, 228, //light violet
130, 0, 128, //violet
]
);
export const OLD_COLORS_ABGR: Uint32Array = new Uint32Array(OLD_COLORS_RGB.length / 3);
let cnt = 0;
for (let index = 0; index < OLD_COLORS_ABGR.length; index += 1) {
const r = OLD_COLORS_RGB[cnt++];
const g = OLD_COLORS_RGB[cnt++];
const b = OLD_COLORS_RGB[cnt++];
OLD_COLORS_ABGR[index] = (0xFF000000) | (b << 16) | (g << 8) | (r);
}
cnt = null;
//-----------------------------
/*
* convert new color to old color
* @param clr Color index of old color
* @return Color index of new, converted color
*/
function colorConvert(clr: number): number {
clr = clr & 0x1F; //this removes protections
if (clr == 2) return 2; //hardcoded exception for
if (clr == 19) return 25; //the valid white and ocean blue
const oldClr = OLD_COLORS_ABGR[clr];
const newClr = COLORS_ABGR.indexOf(oldClr);
return newClr;
}
/*
* Creating new basechunk if new size is a multiple of the old size
* @param x x coordinates of chunk (in chunk coordinates, not pixel coordinates)
* @param y y coordinates of chunk (in chunk coordinates, not pixel coordinates)
*/
async function createBasechunkFromMultipleOldChunks(x: number, y: number): Uint8Array {
const chunkBuffer = new Uint8Array(TILE_SIZE * TILE_SIZE);
const xabs = x * CHUNKS_IN_BASETILE + CHUNK_MIN_XY;
const yabs = y * CHUNKS_IN_BASETILE + CHUNK_MIN_XY;
let na = 0;
for (let dy = 0; dy < CHUNKS_IN_BASETILE; dy += 1) {
for (let dx = 0; dx < CHUNKS_IN_BASETILE; dx += 1) {
const smallchunk = await oldredis.get(`chunk:${xabs + dx}:${yabs + dy}`);
if (!smallchunk) {
na++;
continue;
}
const chunk = new Uint8Array(smallchunk);
const chunkOffset = (dx + dy * CHUNKS_IN_BASETILE * CHUNK_SIZE) * CHUNK_SIZE; //offset in pixels
let pos = 0;
for (let row = 0; row < CHUNK_SIZE; row += 1) {
let pixelOffset = (chunkOffset + row * CHUNK_SIZE * CHUNKS_IN_BASETILE);
const max = pixelOffset + CHUNK_SIZE;
while (pixelOffset < max) {
let color = chunk[pos++];
chunkBuffer[pixelOffset++] = colorConvert(color >> 4);
chunkBuffer[pixelOffset++] = colorConvert(color & 0x0F);
}
}
}
}
if (na != CHUNKS_IN_BASETILE * CHUNKS_IN_BASETILE) {
const key = `chunk:${x}:${y}`;
const setNXArgs = [key, Buffer.from(chunkBuffer.buffer).toString('binary')]
await newredis.sendCommand('SETNX', setNXArgs);
console.log("Created Chunk ", key, "with", na, "empty chunks");
}
}
/*
* Creating new basechunk if the sizes are the same, just the colors changed
* @param x x coordinates of chunk (in chunk coordinates, not pixel coordinates)
* @param y y coordinates of chunk (in chunk coordinates, not pixel coordinates)
*/
async function createBasechunk(x: number, y: number): Uint8Array {
const key = `chunk:${x}:${y}`;
const newChunk = new Uint8Array(TILE_SIZE * TILE_SIZE);
const smallchunk = await oldredis.get(key);
if (!smallchunk) {
return
}
const oldChunk = new Uint8Array(smallchunk);
if (oldChunk.length != newChunk.length || oldChunk.length != TILE_SIZE * TILE_SIZE) {
console.log(`ERROR: Chunk length ${oldChunk.length} of chunk ${x},${y} not of correct size!`);
}
for (let px = 0; px < oldChunk.length; px += 1) {
newChunk[px] = colorConvert(oldChunk[px]);
}
const setNXArgs = [key, Buffer.from(newChunk.buffer).toString('binary')]
await newredis.sendCommand('SETNX', setNXArgs);
console.log("Created Chunk ", key);
}
/*
* Convert redis canvas
*/
async function convert() {
for (let x = 0; x < CANVAS_SIZE / TILE_SIZE; x++) {
console.log(x);
for (let y = 0; y < CANVAS_SIZE / TILE_SIZE; y++) {
await createBasechunk(x, y);
}
}
}
oldredis.connect()
.then(() => newredis.connect())
.then(() => convert());

View File

@ -1,66 +0,0 @@
/* @flow */
//this script just copies chunks from one redis to another
import redis from 'redis';
import {
TILE_SIZE,
THREE_TILE_SIZE,
} from '../src/core/constants';
// ATTENTION: Make sure to set the redis URLs right!!!
const oldurl = "redis://localhost:6380";
const oldredis = redis.createClient({ url: oldurl });
const newurl = "redis://localhost:6379";
const newredis = redis.createClient({ url: newurl });
oldredis.connect();
newredis.connect();
const CANVAS_SIZE = 1024;
const OUR_TILE_SIZE = THREE_TILE_SIZE;
const CHUNKS_XY = CANVAS_SIZE / OUR_TILE_SIZE;
async function copyChunks() {
for (let x = 0; x < CHUNKS_XY; x++) {
for (let y = 0; y < CHUNKS_XY; y++) {
const oldkey = `ch:2:${x}:${y}`;
const newkey = `ch:2:${x}:${y}`;
const chunk = await oldredis.get(oldkey);
if (chunk) {
await newredis.set(newkey, chunk);
console.log("Created Chunk ", newkey);
}
}
}
}
function chunkOfCord(cor) {
return Math.floor((cor + CANVAS_SIZE / 2) / OUR_TILE_SIZE);
}
async function copyChunksByCoords(xMin, xMax, yMin, yMax) {
const chunkXMin = chunkOfCord(xMin);
const chunkXMax = chunkOfCord(xMax);
const chunkYMin = chunkOfCord(yMin);
const chunkYMax = chunkOfCord(yMax);
for (let x = chunkXMin; x <= chunkXMax; x++) {
for (let y = chunkYMin; y <= chunkYMax; y++) {
const oldkey = `ch:2:${x}:${y}`;
const newkey = `ch:2:${x}:${y}`;
const chunk = await oldredis.get(oldkey);
if (chunk) {
await newredis.set(newkey, chunk);
console.log("Created Chunk ", newkey);
} else {
await newredis.del(newkey);
console.log("Deleted Chunk ", newkey);
}
}
}
}
module.exports = copyChunksByCoords;
// copyChunksByCoords(-160, 60, -60, 160);

View File

@ -1,42 +0,0 @@
/* @flow */
// this script removes protection from all pixels on the main canvas
import redis from 'redis';
// ATTENTION: Make sure to set the redis URLs right!!!
const urlo = "redis://localhost:6379";
const url = "redis://localhost:6380";
const rediso = redis.createClient(urlo, { return_buffers: true });
const redisc = redis.createClient(url, { return_buffers: true });
const CANVAS_SIZE = 256 * 256;
const TILE_SIZE = 256;
const CHUNKS_XY = CANVAS_SIZE / TILE_SIZE;
async function moveProtection() {
for (let x = 0; x < CHUNKS_XY; x++) {
for (let y = 0; y < CHUNKS_XY; y++) {
const key = `ch:0:${x}:${y}`;
const chunk = await redisc.get(key);
if (chunk) {
const buffer = new Uint8Array(chunk);
let changed = false;
for (let u = 0; u < buffer.length; ++u) {
const bit = buffer[u];
if (bit & 0x80) {
buffer[u] = bit & 0x1F;
changed = true;
}
}
if (changed) {
await rediso.set(key, Buffer.from(buffer.buffer));
console.log("Changed Chunk ", key);
}
}
}
}
console.log("done");
}
moveProtection();

View File

@ -1,202 +0,0 @@
/*
* Just a test script for sequelize sql stuff
*
*/
import Sequelize from 'sequelize';
import DataType from 'sequelize';
import Model from 'sequelize';
import bcrypt from 'bcrypt';
const mysql_host = "localhost";
const mysql_user = "user";
const mysql_password = "password";
const mysql_db = "database";
const Op = Sequelize.Op;
const operatorsAliases = {
$eq: Op.eq,
$ne: Op.ne,
$gte: Op.gte,
$gt: Op.gt,
$lte: Op.lte,
$lt: Op.lt,
$not: Op.not,
$in: Op.in,
$notIn: Op.notIn,
$is: Op.is,
$like: Op.like,
$notLike: Op.notLike,
$iLike: Op.iLike,
$notILike: Op.notILike,
$regexp: Op.regexp,
$notRegexp: Op.notRegexp,
$iRegexp: Op.iRegexp,
$notIRegexp: Op.notIRegexp,
$between: Op.between,
$notBetween: Op.notBetween,
$overlap: Op.overlap,
$contains: Op.contains,
$contained: Op.contained,
$adjacent: Op.adjacent,
$strictLeft: Op.strictLeft,
$strictRight: Op.strictRight,
$noExtendRight: Op.noExtendRight,
$noExtendLeft: Op.noExtendLeft,
$and: Op.and,
$or: Op.or,
$any: Op.any,
$all: Op.all,
$values: Op.values,
$col: Op.col
};
const sequelize = new Sequelize(mysql_db, mysql_user, mysql_password, {
host: mysql_host,
dialect: 'mysql',
pool: {
min: 5,
max: 25,
idle: 10000,
acquire: 10000,
},
dialectOptions: {
connectTimeout: 10000,
multipleStatements: true,
},
operatorsAliases: operatorsAliases, // use Sequelize.Op
multipleStatements: true,
//operatorsAliases: false,
});
const RegUser = sequelize.define('User', {
id: {
type: DataType.INTEGER.UNSIGNED,
autoIncrement: true,
primaryKey: true,
},
email: {
type: DataType.CHAR(40),
allowNull: true,
},
name: {
type: DataType.CHAR(32),
allowNull: false,
},
//null if external oauth authentication
password: {
type: DataType.CHAR(60),
allowNull: true,
},
totalPixels: {
type: DataType.INTEGER.UNSIGNED,
allowNull: false,
defaultValue: 0,
},
dailyTotalPixels: {
type: DataType.INTEGER.UNSIGNED,
allowNull: false,
defaultValue: 0,
},
ranking: {
type: DataType.INTEGER.UNSIGNED,
allowNull: true,
},
dailyRanking: {
type: DataType.INTEGER.UNSIGNED,
allowNull: true,
},
//mail verified
verified: {
type: DataType.BOOLEAN,
allowNull: false,
defaultValue: false,
},
discordid: {
type: DataType.CHAR(18),
allowNull: true,
},
redditid: {
type: DataType.CHAR(10),
allowNull: true,
},
//when mail verification got requested,
//used for purging unverified accounts
verificationReqAt: {
type: DataType.DATE,
allowNull: true,
},
lastLogIn: {
type: DataType.DATE,
allowNull: true,
},
}, {
multipleStatements: true,
timestamps: true,
updatedAt: false,
setterMethods: {
password(value: string): string {
if(value) this.setDataValue('password', generateHash(value));
},
},
});
async function recalculate() {
//multiple sql statements at once,
//important here, because splitting them could send each statement to a different pooled connection, and the @r user variable is per-connection
await sequelize.query("SET @r=0; UPDATE Users SET ranking= @r:= (@r + 1) ORDER BY totalPixels DESC;");
await sequelize.query("SET @r=0; UPDATE Users SET dailyRanking= @r:= (@r + 1) ORDER BY dailyTotalPixels DESC;");
//delete all rows with timestamp older than 4 days
RegUser.destroy({
where: {
verificationReqAt: {
$lt: Sequelize.literal('CURRENT_TIMESTAMP - INTERVAL 4 DAY')
},
verified: 0,
}
})
//update whole column
RegUser.update({dailyTotalPixels: 0},{where:{}});
//select command that also prints the datediff
RegUser.findAll({
attributes: [ 'name', 'totalPixels', 'ranking' , 'dailyRanking', 'dailyTotalPixels', 'createdAt', [Sequelize.fn('DATEDIFF', Sequelize.literal('CURRENT_TIMESTAMP'), Sequelize.col('createdAt')),'age']],
limit: 10,
order: ['ranking'],
}).then((users) =>{
console.log("All users:", JSON.stringify(users, null, 4));
return;
const ranking = [];
users.forEach((user) => {
const createdAt = new Date(user.createdAt);
const registeredSince = createdAt.getDate() + "." + (createdAt.getMonth()+1) + "." + createdAt.getFullYear();
ranking.push({
rank: user.ranking,
name: user.name,
totalPixels: user.totalPixels,
dailyRanking: user.dailyRanking,
dailyTotalPixels: user.dailyTotalPixels,
registeredSince,
});
});
console.log(ranking);
});
}
setTimeout(recalculate, 2000);

View File

@ -1,106 +0,0 @@
/*
* upload image to canvas in console
* doesn't care about previous data
* no checks - don't use if you can use Admintools
*/
import redis from 'redis';
import sharp from 'sharp';
import canvases from '../src/canvases.json';
import Palette from '../src/core/Palette';
import {
getChunkOfPixel,
} from '../src/core/utils';
import {
TILE_SIZE,
} from '../src/core/constants';
// ATTENTION: Make sure to set the redis URLs right!!!
const redisurl = "redis://localhost:6379";
const redisCanvas = redis.createClient(redisurl, { return_buffers: true });
/*
* copied and modified from src/core/Image.js
*/
async function imageABGR2Canvas(
canvasId: number,
x: number,
y: number,
data: Buffer,
width: number,
height: number,
) {
console.log(
`Loading image with dim ${width}/${height} to ${x}/${y}/${canvasId}`,
);
const canvas = canvases[canvasId];
const { colors, cli, size } = canvas;
const palette = new Palette(colors);
const canvasMinXY = -(size / 2);
const imageData = new Uint32Array(data.buffer);
const [ucx, ucy] = getChunkOfPixel(size, x, y);
const [lcx, lcy] = getChunkOfPixel(size, x + width, y + height);
let totalPxlCnt = 0;
console.log(`Loading to chunks from ${ucx} / ${ucy} to ${lcx} / ${lcy} ...`);
let chunk;
for (let cx = ucx; cx <= lcx; cx += 1) {
for (let cy = ucy; cy <= lcy; cy += 1) {
chunk = new Uint8Array(TILE_SIZE * TILE_SIZE);
// offset of chunk in image
const cOffX = cx * TILE_SIZE + canvasMinXY - x;
const cOffY = cy * TILE_SIZE + canvasMinXY - y;
let cOff = 0;
let pxlCnt = 0;
for (let py = 0; py < TILE_SIZE; py += 1) {
for (let px = 0; px < TILE_SIZE; px += 1) {
const clrX = cOffX + px;
const clrY = cOffY + py;
if (clrX >= 0 && clrY >= 0 && clrX < width && clrY < height) {
const clr = imageData[clrX + clrY * width];
const clrIndex = palette.abgr.indexOf(clr);
if (clrIndex !== -1) {
chunk[cOff] = clrIndex;
pxlCnt += 1;
}
}
cOff += 1;
}
}
if (pxlCnt) {
const key = `ch:${canvasId}:${cx}:${cy}`;
await redisCanvas.set(key, Buffer.from(chunk.buffer));
console.log(`Loaded ${pxlCnt} pixels into chunk ${cx}, ${cy}.`);
totalPxlCnt += pxlCnt;
}
chunk = null;
}
}
console.log('Image loading done.');
return totalPxlCnt;
}
async function uploadImage(
path,
canvasId,
x,
y,
) {
const { data, info } = await sharp(path)
.ensureAlpha()
.raw()
.toBuffer({ resolveWithObject: true });
const pxlCount = await imageABGR2Canvas(
canvasId,
x, y,
data,
info.width, info.height,
);
}
uploadImage('PZ.png', '5', -4096, -4096);
//uploadImage('PC.png', '6', -7000, -7000)