From 9fda308306178860b0d224a5d58de31f9351a568 Mon Sep 17 00:00:00 2001
From: rainbow napkin
Date: Thu, 9 Oct 2025 03:50:05 -0400
Subject: [PATCH] Started work on legacy account migration.

---
 .gitignore                          |   3 +-
 config.example.json                 |   1 +
 config.example.jsonc                |   4 +
 src/schemas/user/migrationSchema.js | 190 ++++++++++++++++++++++++++++
 src/server.js                       |   4 +
 5 files changed, 201 insertions(+), 1 deletion(-)
 create mode 100644 src/schemas/user/migrationSchema.js

diff --git a/.gitignore b/.gitignore
index 928c465..9868f22 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,4 +11,5 @@ state.json
 chatexamples.txt
 server.cert
 server.key
-www/nonfree/*
\ No newline at end of file
+www/nonfree/*
+migration/*
\ No newline at end of file
diff --git a/config.example.json b/config.example.json
index 22c388c..372a5ac 100644
--- a/config.example.json
+++ b/config.example.json
@@ -9,6 +9,7 @@
     "sessionSecret": "CHANGE_ME",
     "altchaSecret": "CHANGE_ME",
     "ipSecret": "CHANGE_ME",
+    "migrate": false,
     "ssl":{
         "cert": "./server.cert",
         "key": "./server.key"
diff --git a/config.example.jsonc b/config.example.jsonc
index 544e906..fea8c16 100644
--- a/config.example.jsonc
+++ b/config.example.jsonc
@@ -24,6 +24,10 @@
     "altchaSecret": "CHANGE_ME",
     //IP Secret used to salt IP Hashes
     "ipSecret": "CHANGE_ME",
+    //Enable migration of a legacy DB and toke files dumped into the ./migration/ directory
+    //WARNING: The migration folder is cleared after server boot, regardless of whether this option is enabled or a migration actually took place.
+    //Keep your backups in a safe place, preferably a machine that DOESN'T have open inbound ports exposed to the internet or a publicly accessible reverse proxy!
+    "migrate": false,
     //SSL cert and key locations
     "ssl":{
         "cert": "./server.cert",
diff --git a/src/schemas/user/migrationSchema.js b/src/schemas/user/migrationSchema.js
new file mode 100644
index 0000000..6c4bad9
--- /dev/null
+++ b/src/schemas/user/migrationSchema.js
@@ -0,0 +1,190 @@
+/*Canopy - The next generation of stoner streaming software
+Copyright (C) 2024-2025 Rainbownapkin and the TTN Community
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU Affero General Public License as
+published by the Free Software Foundation, either version 3 of the
+License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU Affero General Public License for more details.
+
+You should have received a copy of the GNU Affero General Public License
+along with this program. If not, see <https://www.gnu.org/licenses/>.*/
+
+//Node Imports
+const fs = require('node:fs/promises');
+
+//NPM Imports
+const {mongoose} = require('mongoose');
+
+//local imports
+const config = require('../../../config.json');
+const {userModel} = require('../user/userSchema');
+const permissionModel = require('../permissionSchema');
+
+
+/**
+ * DB Schema for documents representing legacy fore.st migration data for a single user account
+ */
+const migrationSchema = new mongoose.Schema({
+    user:{
+        type: mongoose.SchemaTypes.String,
+        unique: true,
+        required: true
+    },
+    pass: {
+        type: mongoose.SchemaTypes.String,
+        required: true
+    },
+    rank: {
+        type: mongoose.SchemaTypes.Number,
+        required: true
+    },
+    email: {
+        type: mongoose.SchemaTypes.String,
+        default: ''
+    },
+    bio: {
+        type: mongoose.SchemaTypes.String,
+        default: 'Bio not set!'
+    },
+    image: {
+        type: mongoose.SchemaTypes.String,
+        default: "/nonfree/johnny.png"
+    },
+    date: {
+        type: mongoose.SchemaTypes.Date,
+        required: true
+    },
+    tokes: {
+        type: mongoose.SchemaTypes.Map,
+        default: new Map(),
+        required: true
+    },
+});
+
+//statics
+/**
+ * Static method for ingesting a data dump from a legacy cytube/fore.st server
+ */
+migrationSchema.statics.ingestLegacyDump = async function(){
+    //If migration is disabled
+    if(!config.migrate){
+        //BAIL!
+        return;
+    }
+
+    //Migration dump directory
+    const dir = "./migration/";
+    const userDump = `${dir}users.sql`;
+
+    //Double check migration files
+    try{
+        //Pull dump stats
+        await fs.stat(userDump);
+    //If we caught an error (most likely it's missing)
+    }catch(err){
+        //BAIL!
+        return;
+    }
+
+    //Pull raw dump from file
+    const rawDump = await fs.readFile(userDump, 'binary');
+
+    //Split dump by line
+    const splitDump = rawDump.split('\n');
+
+    //For each line in the user dump
+    for(const line of splitDump){
+        //Ingest the legacy user profile
+        await this.ingestLegacyUser(line);
+    }
+}
+
+/**
+ * Ingests a single line containing a single profile out of an .sql data dump from a legacy cytube/fore.st server
+ * @param {String} rawProfile - Line of text containing a raw profile dump
+ */
+migrationSchema.statics.ingestLegacyUser = async function(rawProfile){
+    //If migration is disabled
+    if(!config.migrate){
+        //BAIL!
+        return;
+    }
+
+    //Filter out the entry from any extra guff on the line
+    const profileMatches = rawProfile.match(/^\((.*?(?=,),){9}.*?(?=\))\)/g);
+
+    //If we have an invalid line
+    if(profileMatches == null || profileMatches.length <= 0){
+        //BAIL!
+        return;
+    }
+
+    //Set filtered profile to the match we found
+    let filteredProfile = profileMatches[0];
+
+    //Cook the filtered profile in order to trick the JSON interpreter into thinking it's an array
+    filteredProfile = `[${filteredProfile.substring(1, filteredProfile.length - 1)}]`;
+
+    //Replace single quotes with double quotes to match JSON strings
+    filteredProfile = filteredProfile.replaceAll(",'",',"');
+    filteredProfile = filteredProfile.replaceAll("',",'",');
+
+    //Make sure any remaining single quotes are escaped
+    filteredProfile = filteredProfile.replaceAll("\'",'\\\'');
+
+
+    //Dupe the JSON interpreter like the rube that it is
+    const profileArray = JSON.parse(filteredProfile);
+
+    //If the profile array is the wrong length
+    if(profileArray.length != 10){
+        //BAIL!
+        return;
+    }
+
+    //Look for the user in the migration table and the live user table
+    const foundMigration = await this.findOne({user: profileArray[1]});
+    const foundUser = await userModel.findOne({user: profileArray[1]});
+
+    //If we found the user in the database
+    if(foundMigration != null || foundUser != null){
+        //Scream
+        console.log(`Found legacy user ${profileArray[1]} in database, skipping migration!`);
+        //BAIL!
+        return;
+    }
+
+
+    //Create migration profile object from scraped info
+    const migrationProfile = new this({
+        user: profileArray[1],
+        pass: profileArray[2],
+        //Clamp rank to 0 and the max setting allowed by the rank enum
+        rank: Math.min(Math.max(0, profileArray[3]), permissionModel.rankEnum.length - 1),
+        email: profileArray[4],
+        date: profileArray[7],
+    });
+
+    //If the profile's bio column isn't empty
+    if(profileArray[5] != ''){
+        //Make sure single quotes are escaped, then parse the bio JSON
+        const bioObject = JSON.parse(profileArray[5].replaceAll("\'",'\\\''));
+
+        //Inject bio information into the migration profile, only if it's present
+        migrationProfile.bio = bioObject.text == '' ? undefined : bioObject.text;
+        migrationProfile.image = bioObject.image == '' ? undefined : bioObject.image;
+    }
+
+    //Build a DB doc from the migration profile and dump it into the DB
+    await this.create(migrationProfile);
+
+    //Let the world know of our triumph!
+    console.log(`Legacy user profile ${migrationProfile.user} migrated successfully!`);
+}
+
+module.exports = mongoose.model("migration", migrationSchema);
\ No newline at end of file
diff --git a/src/server.js b/src/server.js
index 091bbe0..8a13955 100644
--- a/src/server.js
+++ b/src/server.js
@@ -43,6 +43,7 @@ const statModel = require('./schemas/statSchema');
 const flairModel = require('./schemas/flairSchema');
 const emoteModel = require('./schemas/emoteSchema');
 const tokeCommandModel = require('./schemas/tokebot/tokeCommandSchema');
+const migrationModel = require('./schemas/user/migrationSchema');
 //Controller
 const fileNotFoundController = require('./controllers/404Controller');
 //Router
@@ -192,6 +193,9 @@ emoteModel.loadDefaults();
 
 //Load default toke commands
 tokeCommandModel.loadDefaults();
 
+//Run legacy migration
+migrationModel.ingestLegacyDump();
+
 //Kick off scheduled-jobs
 scheduler.kickoff();
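
Note (not part of the patch): the sketch below is a standalone trace of the quote-swapping trick ingestLegacyUser uses to coax one dump line through JSON.parse. The sample row is invented and the real fore.st/cytube column layout is an assumption here; only the index mapping (1 = user, 2 = password hash, 3 = rank, 4 = email, 5 = profile JSON, 7 = date) is taken from the patch itself.

//Standalone sketch, run with plain node. Sample data is made up; values must not
//contain stray quotes for this swap-and-parse approach to hold.
const sampleLine = "(1,'johnny','$2b$10$fakehashfakehashfakehash',3,'johnny@example.net','','','2020-04-20 16:20:00','',0),";

//Isolate the parenthesised tuple: nine lazy "up to the next comma" groups, then the tail up to ")"
const matches = sampleLine.match(/^\((.*?(?=,),){9}.*?(?=\))\)/g);

if(matches != null){
    //Swap the parentheses for brackets so the tuple reads like a JSON array
    let cooked = `[${matches[0].substring(1, matches[0].length - 1)}]`;

    //Turn the SQL-style single-quote delimiters into JSON double quotes
    cooked = cooked.replaceAll(",'", ',"');
    cooked = cooked.replaceAll("',", '",');

    const profileArray = JSON.parse(cooked);

    console.log(profileArray.length); //10, the sanity check the patch performs
    console.log(profileArray[1]);     //'johnny' -> migrationProfile.user
    console.log(profileArray[3]);     //3 -> migrationProfile.rank (clamped against the rank enum)
    console.log(profileArray[7]);     //'2020-04-20 16:20:00' -> migrationProfile.date
}

As written, the trick only holds when field values contain no stray single or double quotes: a leftover single quote gets rewritten as \' by the later replaceAll, which is not a valid JSON escape and makes JSON.parse throw.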
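
Also not part of the patch: a small sketch of the rank clamp applied while building the migration profile. The rankEnum below is a hypothetical stand-in; the real list lives on permissionModel (permissionSchema), and its length drives the upper bound.

//Hypothetical stand-in for permissionModel.rankEnum, purely for illustration
const rankEnum = ['user', 'moderator', 'admin', 'owner'];

//Same clamp expression the patch uses when it builds the migration profile
const clampRank = (legacyRank) => Math.min(Math.max(0, legacyRank), rankEnum.length - 1);

console.log(clampRank(-3));  //0, negative legacy ranks floor at the lowest rank
console.log(clampRank(2));   //2, in-range ranks pass through untouched
console.log(clampRank(255)); //3, oversized ranks cap at the last enum index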