Started work on legacy account migration.

This commit is contained in:
rainbow napkin 2025-10-09 03:50:05 -04:00
parent 4698ba4122
commit 9fda308306
5 changed files with 201 additions and 1 deletions

3
.gitignore vendored
View file

@ -11,4 +11,5 @@ state.json
chatexamples.txt chatexamples.txt
server.cert server.cert
server.key server.key
www/nonfree/* www/nonfree/*
migration/*

View file

@ -9,6 +9,7 @@
"sessionSecret": "CHANGE_ME", "sessionSecret": "CHANGE_ME",
"altchaSecret": "CHANGE_ME", "altchaSecret": "CHANGE_ME",
"ipSecret": "CHANGE_ME", "ipSecret": "CHANGE_ME",
"migrate": false,
"ssl":{ "ssl":{
"cert": "./server.cert", "cert": "./server.cert",
"key": "./server.key" "key": "./server.key"

View file

@ -24,6 +24,10 @@
"altchaSecret": "CHANGE_ME", "altchaSecret": "CHANGE_ME",
//IP Secret used to salt IP Hashes //IP Secret used to salt IP Hashes
"ipSecret": "CHANGE_ME", "ipSecret": "CHANGE_ME",
//Enable to migrate legacy DB and toke files dumped into the ./migration/ directory
//WARNING: The migration folder is cleared after server boot, whether or not a migration took place or this option is enabled.
//Keep your backups in a safe place, preferably a machine that DOESN'T have open inbound ports exposed to the internet or a publicly accessible reverse proxy!
"migrate": false,
//SSL cert and key locations //SSL cert and key locations
"ssl":{ "ssl":{
"cert": "./server.cert", "cert": "./server.cert",

View file

@ -0,0 +1,190 @@
/*Canopy - The next generation of stoner streaming software
Copyright (C) 2024-2025 Rainbownapkin and the TTN Community
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.*/
//Node Imports
const fs = require('node:fs/promises');
//NPM Imports
const {mongoose} = require('mongoose');
//local imports
const config = require('../../../config.json');
const {userModel} = require('../user/userSchema');
const permissionModel = require('../permissionSchema');
/**
 * DB Schema for documents representing legacy fore.st migration data for a single user account
 */
const migrationSchema = new mongoose.Schema({
    //Legacy username — unique, so each legacy account can only be staged for migration once
    user:{
        type: mongoose.SchemaTypes.String,
        unique: true,
        required: true
    },
    //Legacy credential carried over verbatim from the dump (presumably a password hash — TODO confirm format against the legacy DB)
    pass: {
        type: mongoose.SchemaTypes.String,
        required: true
    },
    //Legacy numeric rank; clamped to the rank enum bounds at ingest time (see ingestLegacyUser)
    rank: {
        type: mongoose.SchemaTypes.Number,
        required: true
    },
    //Legacy email address; empty string when the dump has none
    email: {
        type: mongoose.SchemaTypes.String,
        default: ''
    },
    //Profile bio text, populated from the dump's bio JSON when present
    bio: {
        type: mongoose.SchemaTypes.String,
        default: 'Bio not set!'
    },
    //Profile image path, populated from the dump's bio JSON when present
    image: {
        type: mongoose.SchemaTypes.String,
        default: "/nonfree/johnny.png"
    },
    //Account date taken from dump column 7 (presumably registration date — TODO confirm)
    date: {
        type: mongoose.SchemaTypes.Date,
        required: true
    },
    //Legacy toke counts keyed by toke type
    //NOTE(review): a single Map instance as a default may be shared across documents unless mongoose clones it — consider `default: () => new Map()`; confirm against mongoose docs
    tokes: {
        type: mongoose.SchemaTypes.Map,
        default: new Map(),
        required: true
    },
});
//statics
/**
 * Static method for ingesting a data dump from a legacy cytube/fore.st server.
 * Reads ./migration/users.sql line-by-line and stages each profile via ingestLegacyUser.
 * No-ops when config.migrate is disabled or the dump file is absent.
 */
migrationSchema.statics.ingestLegacyDump = async function(){
    //If migration is disabled
    if(!config.migrate){
        //BAIL!
        return;
    }
    //Migration dump directory and expected user dump location
    const dir = "./migration/";
    const userDump = `${dir}users.sql`;
    //Double check the migration file exists before reading it
    try{
        //Pull dump stats
        await fs.stat(userDump);
    //If we caught an error (most likely the file is missing)
    }catch(err){
        //BAIL!
        return;
    }
    //Pull raw dump from file
    //NOTE(review): 'binary' is a deprecated alias for latin1 — if dumps can contain non-ASCII text, 'utf8' is likely the intended encoding; confirm against a real dump before changing
    const rawDump = await fs.readFile(userDump, 'binary');
    //Split dump by line
    const splitDump = rawDump.split('\n');
    //For each line in the user dump
    for(const line of splitDump){
        try{
            //Ingest the legacy user profile
            //(awaited so rejections are caught here and rows are processed in order — previously this promise was left floating)
            await this.ingestLegacyUser(line);
        }catch(err){
            //One malformed row shouldn't abort the whole migration — log and move on
            console.log(`Failed to ingest legacy user line: ${err.message}`);
        }
    }
};
/**
 * Ingests a single line containing a single profile out of an .sql data dump from a legacy cytube/fore.st server
 * @param {String} rawProfile - Line of text containing a raw profile dump
 */
migrationSchema.statics.ingestLegacyUser = async function(rawProfile){
    //If migration is disabled
    if(!config.migrate){
        //BAIL!
        return;
    }
    //Filter out the entry from any extra guff on the line
    //(matches a parenthesized 10-field comma-separated tuple at the start of the line)
    const profileMatches = rawProfile.match(/^\((.*?(?=,),){9}.*?(?=\))\)/g);
    //If we have an invalid line — match() returns null when nothing matched
    //(previously compared the array itself with `<= 0`, which only worked by null→0 coercion)
    if(profileMatches === null || profileMatches.length === 0){
        //BAIL!
        return;
    }
    //Set filtered profile to the match we found
    let filteredProfile = profileMatches[0];
    //cook the filtered profile in order to trick the JSON interpreter into thinking it's an array
    filteredProfile = `[${filteredProfile.substring(1, filteredProfile.length - 1)}]`;
    //Replace single quotes with double to match JSON strings
    filteredProfile = filteredProfile.replaceAll(",'",',"');
    filteredProfile = filteredProfile.replaceAll("',",'",');
    //Make sure single quotes are escaped
    //FIXME(review): `\'` is not a valid JSON escape sequence — fields containing a bare single quote will make JSON.parse throw (now skipped below instead of crashing the ingest)
    filteredProfile = filteredProfile.replaceAll("\'",'\\\'');
    //Dupe the JSON interpreter like the rube that it is
    let profileArray;
    try{
        profileArray = JSON.parse(filteredProfile);
    }catch(err){
        //Scream, then skip the unparsable row rather than blowing up the whole dump ingest
        console.log(`Skipping unparsable legacy profile line: ${err.message}`);
        //BAIL!
        return;
    }
    //If profile array is the wrong length
    if(profileArray.length != 10){
        //BAIL!
        return;
    }
    //Look for the user in the migration table and the live user table (independent lookups, run in parallel)
    const [foundMigration, foundUser] = await Promise.all([
        this.findOne({user: profileArray[1]}),
        userModel.findOne({user: profileArray[1]})
    ]);
    //If we found the user in the database
    if(foundMigration != null || foundUser != null){
        //Scream
        console.log(`Found legacy user ${profileArray[1]} in database, skipping migration!`);
        //BAIL!
        return;
    }
    //Create migration profile object from scraped info
    const migrationProfile = new this({
        user: profileArray[1],
        pass: profileArray[2],
        //Clamp rank to 0 and the max setting allowed by the rank enum
        rank: Math.min(Math.max(0, profileArray[3]), permissionModel.rankEnum.length - 1),
        email: profileArray[4],
        date: profileArray[7],
    });
    //If the bio field isn't empty
    if(profileArray[5] != ''){
        try{
            //Make sure single quotes are escaped, and parse bio JSON
            const bioObject = JSON.parse(profileArray[5].replaceAll("\'",'\\\''));
            //Inject bio information into migration profile, only if they're present
            migrationProfile.bio = bioObject.text == '' ? undefined : bioObject.text;
            migrationProfile.image = bioObject.image == '' ? undefined : bioObject.image;
        }catch(err){
            //A broken bio shouldn't sink the whole profile — keep the schema defaults and move on
            console.log(`Failed to parse bio for legacy user ${profileArray[1]}: ${err.message}`);
        }
    }
    //Build DB Doc from migration Profile hashtable and dump it into the DB
    await this.create(migrationProfile);
    //Let the world know of our triumph!
    console.log(`Legacy user profile ${migrationProfile.user} migrated successfully!`);
};
//Compile the schema into a model backed by the "migration" collection and export it
module.exports = mongoose.model("migration", migrationSchema);

View file

@ -43,6 +43,7 @@ const statModel = require('./schemas/statSchema');
const flairModel = require('./schemas/flairSchema'); const flairModel = require('./schemas/flairSchema');
const emoteModel = require('./schemas/emoteSchema'); const emoteModel = require('./schemas/emoteSchema');
const tokeCommandModel = require('./schemas/tokebot/tokeCommandSchema'); const tokeCommandModel = require('./schemas/tokebot/tokeCommandSchema');
const migrationModel = require('./schemas/user/migrationSchema');
//Controller //Controller
const fileNotFoundController = require('./controllers/404Controller'); const fileNotFoundController = require('./controllers/404Controller');
//Router //Router
@ -192,6 +193,9 @@ emoteModel.loadDefaults();
//Load default toke commands //Load default toke commands
tokeCommandModel.loadDefaults(); tokeCommandModel.loadDefaults();
//Run legacy migration
migrationModel.ingestLegacyDump();
//Kick off scheduled-jobs //Kick off scheduled-jobs
scheduler.kickoff(); scheduler.kickoff();