380 lines
13 KiB
JavaScript
380 lines
13 KiB
JavaScript
/*Canopy - The next generation of stoner streaming software
|
|
Copyright (C) 2024-2025 Rainbownapkin and the TTN Community
|
|
|
|
This program is free software: you can redistribute it and/or modify
|
|
it under the terms of the GNU Affero General Public License as
|
|
published by the Free Software Foundation, either version 3 of the
|
|
License, or (at your option) any later version.
|
|
|
|
This program is distributed in the hope that it will be useful,
|
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
GNU Affero General Public License for more details.
|
|
|
|
You should have received a copy of the GNU Affero General Public License
|
|
along with this program. If not, see <https://www.gnu.org/licenses/>.*/
|
|
|
|
//Node Imports
|
|
const fs = require('node:fs/promises');
|
|
|
|
//NPM Imports
|
|
const {mongoose} = require('mongoose');
|
|
const validator = require('validator');
|
|
|
|
//local imports
|
|
const config = require('../../../config.json');
|
|
const {userModel} = require('../user/userSchema');
|
|
const permissionModel = require('../permissionSchema');
|
|
const tokeModel = require('../tokebot/tokeSchema');
|
|
const statModel = require('../statSchema');
|
|
const emailChangeModel = require('../user/emailChangeSchema');
|
|
const loggerUtils = require('../../utils/loggerUtils');
|
|
const hashUtils = require('../../utils/hashUtils');
|
|
const mailUtils = require('../../utils/mailUtils');
|
|
|
|
/**
|
|
* DB Schema for documents representing legacy fore.st migration data for a single user account
|
|
*/
|
|
const migrationSchema = new mongoose.Schema({
    //Legacy username - unique key used to match migration attempts against
    user:{
        type: mongoose.SchemaTypes.String,
        unique: true,
        required: true
    },
    //Legacy password hash (verified via hashUtils.compareLegacyPassword when the profile is consumed)
    pass: {
        type: mongoose.SchemaTypes.String,
        required: true
    },
    //Legacy numeric rank - used as an index into permissionModel.rankEnum on consume
    rank: {
        type: mongoose.SchemaTypes.Number,
        required: true
    },
    //Legacy email address, if any; re-verified on consume before being attached to the new account
    email: {
        type: mongoose.SchemaTypes.String,
        default: ''
    },
    //Legacy profile bio text
    bio: {
        type: mongoose.SchemaTypes.String,
        default: 'Bio not set!'
    },
    //Legacy avatar image path
    image: {
        type: mongoose.SchemaTypes.String,
        default: "/nonfree/johnny.png"
    },
    //Original account creation date on the legacy server
    date: {
        type: mongoose.SchemaTypes.Date,
        required: true
    },
    //Total legacy toke count (injected by ingestTokeMaps after user ingestion)
    tokes: {
        type: mongoose.SchemaTypes.Number,
        default: 0,
    }
});
|
|
|
|
//TODO: before next commit, add error checking to the ingestLegacy statics down below
|
|
//Also add a warning for the fail condition in ingestLegacyDump that bails out when missing files
|
|
|
|
//statics
|
|
/**
 * Static method for ingesting a data dump from a legacy cytube/fore.st server.
 * Reads ./migration/users.sql line-by-line into migration profiles, then collects
 * the tokebot dump files under ./migration/tokebot/ (toke maps and toke logs) and
 * hands them to ingestTokeMaps / tokeModel.ingestLegacyTokes respectively.
 * Warns and bails out when the migration files are missing; when migration is
 * disabled in config.json it drops any leftover legacy tokes and returns.
 */
migrationSchema.statics.ingestLegacyDump = async function(){
    try{
        //If migration is disabled
        if(!config.migrate){
            //Clean up any leftover legacy toke documents before bailing
            await tokeModel.dropLegacyTokes();
            //BAIL!
            return;
        }

        //Migration directories/file
        const dir = "./migration/";
        const userDump = `${dir}users.sql`;
        const tokeDir = `./migration/tokebot/`;

        //Create array to hold list of toke dump files
        let tokeDumps = [];

        //Double check migration files exist before attempting ingestion
        try{
            //Pull dump stats (throws if the user dump is missing)
            await fs.stat(userDump);

            //Pull toke related files
            tokeDumps = await fs.readdir(tokeDir);

        //If we caught an error (most likely the files are missing)
        }catch(err){
            loggerUtils.consoleWarn("No migration files detected! Please provide legacy migration files or disable migration from config.json!");
            //BAIL!
            return;
        }

        //Pull raw dump from file
        const rawDump = await fs.readFile(userDump, 'binary');

        //Split dump by line
        const splitDump = rawDump.split('\n');

        //For each line in the user dump
        for(const line of splitDump){
            //Ingest the legacy user profile.
            //Awaiting each line sequentially is less efficient, but this runs
            //once at startup and keeps error handling/ordering simple.
            await this.ingestLegacyUser(line);
        }

        //Create arrays to hold toke dumps contents
        const tokeMaps = [];
        const tokeLogs = [];

        //For every toke related file
        for(const file of tokeDumps){
            //Read toke related file
            const rawContents = await fs.readFile(`${tokeDir}${file}`, 'binary');

            //If its a toke file containing a list of toke counts per profile
            if(file.match(/_tokefile/) != null){
                //Push raw toke map into toke maps array
                tokeMaps.push(rawContents);

            //If its a toke log containing a list of tokes
            }else if(file.match(/_toke\.log/) != null){
                //Push file contents into toke log array
                tokeLogs.push(rawContents);
            }
        }

        //Ingest toke maps
        await this.ingestTokeMaps(tokeMaps);

        //Pass toke logs over to the toke model for further ingestion
        await tokeModel.ingestLegacyTokes(tokeLogs);

        loggerUtils.consoleWarn(`Legacy Server Migration Completed at: ${new Date().toLocaleString()}`);
    }catch(err){
        return loggerUtils.localExceptionHandler(err);
    }
}
|
|
|
|
/**
 * Ingests a single line containing a single profile out of an .sql data dump from a legacy cytube/fore.st server
 * @param {String} rawProfile - Line of text containing a raw profile dump
 */
migrationSchema.statics.ingestLegacyUser = async function(rawProfile){
    try{
        //If migration is disabled
        if(!config.migrate){
            //BAIL!
            return;
        }

        //Filter out the entry from any extra guff on the line
        //(matches one parenthesized, 10-field SQL tuple at the start of the line)
        const profileMatches = rawProfile.match(/^\((.*?(?=,),){9}.*?(?=\))\)/g);

        //If we have an invalid line.
        //String.match returns null on no match - check that explicitly instead of
        //relying on the old `profileMatches <= 0` null-coercion trick.
        if(profileMatches == null || profileMatches.length <= 0){
            loggerUtils.consoleWarn('Bad profile detected in legacy dump:');
            loggerUtils.consoleWarn(rawProfile);
            //BAIL!
            return;
        }

        //Set filtered profile to the match we found
        let filteredProfile = profileMatches[0];

        //cook the filtered profile in order to trick the JSON interpreter into thinking it's an array
        filteredProfile = `[${filteredProfile.substring(1, filteredProfile.length - 1)}]`;

        //Replace single quotes with double to match JSON strings
        filteredProfile = filteredProfile.replaceAll(",'",',"');
        filteredProfile = filteredProfile.replaceAll("',",'",');

        //Make sure single quotes are escaped
        filteredProfile = filteredProfile.replaceAll("\'",'\\\'');

        //Dupe the JSON interpreter like the rube that it is
        const profileArray = JSON.parse(filteredProfile);

        //If profile array is the wrong length
        if(profileArray.length != 10){
            loggerUtils.consoleWarn('Bad profile detected in legacy dump:');
            loggerUtils.consoleWarn(profileArray);
            //BAIL!
            return;
        }

        //Look for user in both the migration table and the live user table
        const foundMigration = await this.findOne({user:profileArray[1]});
        const foundUser = await userModel.findOne({user: profileArray[1]});

        //If we found the user in the database
        if(foundMigration != null || foundUser != null){
            //Scream
            loggerUtils.consoleWarn(`Found legacy user ${profileArray[1]} in database, skipping migration!`);
            //BAIL!
            return;
        }

        //Create migration profile object from scraped info
        const migrationProfile = new this({
            user: profileArray[1],
            pass: profileArray[2],
            //Clamp rank to 0 and the max setting allowed by the rank enum
            rank: Math.min(Math.max(0, profileArray[3]), permissionModel.rankEnum.length - 1),
            //NOTE(review): validator.normalizeEmail returns false for invalid input -
            //confirm empty/invalid legacy emails are acceptable being cast by the schema
            email: validator.normalizeEmail(profileArray[4]),
            date: profileArray[7],
        })

        //If the legacy bio field isn't empty
        if(profileArray[5] != ''){
            //Make sure single quotes are escaped, and parse bio JSON
            const bioObject = JSON.parse(profileArray[5].replaceAll("\'",'\\\''));

            //Inject bio information into migration profile, only if present;
            //empty strings become undefined so the schema defaults apply
            migrationProfile.bio = bioObject.text == '' ? undefined : validator.escape(bioObject.text);
            migrationProfile.image = bioObject.image == '' ? undefined : validator.escape(bioObject.image);
        }

        //Build DB Doc from migration Profile hashtable and dump it into the DB
        await this.create(migrationProfile);

        //Let the world know of our triumph!
        console.log(`Legacy user profile ${migrationProfile.user} migrated successfully!`);
    }catch(err){
        return loggerUtils.localExceptionHandler(err);
    }
}
|
|
|
|
/**
 * Ingests array of raw toke map data ripped from the migrations folder and injects it on-top of the existing migration profile collection in the DB
 * @param {Array} rawTokeMaps - List of raw content ripped from legacy cytube/fore.st toke files
 */
migrationSchema.statics.ingestTokeMaps = async function(rawTokeMaps){
    try{
        //If server migration is disabled
        if(!config.migrate){
            //BAIL!!
            return;
        }

        //Create new map to hold the total toke count per user
        const tokeMap = new Map();

        //For each raw map handed to us by the main ingestion method
        for(const rawMap of rawTokeMaps){
            //Parse map into dehydrated map array
            const dehydratedMap = JSON.parse(rawMap);

            //No need to re-hydrate into a Map - the dehydrated entries are
            //[user, count] pairs we can iterate directly
            for(const [user, count] of dehydratedMap.value){
                //Get current toke count for user
                const total = tokeMap.get(user);

                //If this user isn't counted yet
                if(total == null || total == 0){
                    //Set users toke count to parsed count
                    tokeMap.set(user, count);

                //Otherwise
                }else{
                    //Add parsed count to users total
                    tokeMap.set(user, count + total);
                }
            }
        }

        //For each toking user
        for(const [user, total] of tokeMap){
            //Update migration profile to include total tokes
            await this.updateOne({user: user},{$set:{tokes: total}});
            console.log(`${total} tokes injected into user profile ${user}!`);
        }
    }catch(err){
        //Fixed identifier casing: this previously read `loggerutils.localexceptionhandler`,
        //which threw a ReferenceError and masked the original error
        return loggerUtils.localExceptionHandler(err);
    }
}
|
|
|
|
/**
 * Populates the user model's migration cache with the usernames and email
 * addresses of every legacy profile still awaiting migration, so collision
 * checks can run without hitting the migration collection.
 */
migrationSchema.statics.buildMigrationCache = async function(){
    //Grab every pending legacy profile from the migration collection
    const pendingProfiles = await this.find();

    for(const legacyProfile of pendingProfiles){
        //Usernames are cached lowercased for case-insensitive comparisons
        userModel.migrationCache.users.push(legacyProfile.user.toLowerCase());

        //Only cache an email when the legacy profile actually has one set
        const legacyEmail = legacyProfile.email;
        if(legacyEmail != null && legacyEmail != ''){
            userModel.migrationCache.emails.push(legacyEmail.toLowerCase());
        }
    }
}
|
|
|
|
/**
 * Looks up a migration profile by username (case-insensitive, exact match) and consumes it.
 * @param {String} ip - Hashed IP of the migrating client, forwarded to consume()
 * @param {Object} migration - Migration request payload (user, oldPass, newPass, passConfirm)
 * @throws Migration exception (via loggerUtils.exceptionSmith) when no profile matches
 */
migrationSchema.statics.consumeByUsername = async function(ip, migration){
    //Escape regex metacharacters so a crafted username can't inject a pattern (ReDoS / false matches)
    const escapedUser = migration.user.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

    //Pull migration doc by case-insensitive, anchored username match.
    //The previous unanchored RegExp matched any username merely *containing* the input.
    const migrationDB = await this.findOne({user: new RegExp(`^${escapedUser}$`, 'i')});

    //If we have no migration document
    if(migrationDB == null){
        //Bitch and moan
        throw loggerUtils.exceptionSmith("Incorrect username/password.", "migration");
    }

    //Wait on the migration DB doc to be consumed
    await migrationDB.consume(ip, migration);
}
|
|
|
|
//Methods
|
|
/**
 * Consumes a migration profile and creates a new, modern canopy profile from the original.
 * Verifies the legacy password, creates the new user, tattoos the migrating IP, kicks off
 * email re-verification when a legacy email exists, then deletes this migration document.
 * @param {String} ip - Hashed IP of the migrating client (stored via tattooIPRecord and as ipHash)
 * @param {Object} migration - Migration request payload
 * @param {String} migration.oldPass - Original password to authenticate migration against
 * @param {String} migration.newPass - New password to re-hash with modern hashing algo
 * @param {String} migration.passConfirm - Confirmation for the new pass
 * @throws Migration exceptions (via loggerUtils.exceptionSmith) on bad credentials or mismatched confirmation
 */
migrationSchema.methods.consume = async function(ip, migration){
    //If we were handed a bad password
    if(!hashUtils.compareLegacyPassword(migration.oldPass, this.pass)){
        //Complain (same generic message as the lookup failure to avoid leaking which part failed)
        throw loggerUtils.exceptionSmith("Incorrect username/password.", "migration");
    }

    //If we were handed a mismatched confirmation password
    if(migration.newPass != migration.passConfirm){
        //Complain
        throw loggerUtils.exceptionSmith("New password does not match confirmation password.", "migration");
    }

    //Increment user count (also yields the next sequential user id)
    const id = await statModel.incrementUserCount();

    //Create new user from profile info
    const newUser = await userModel.create({
        id,
        user: this.user,
        pass: migration.newPass,
        //Translate the legacy numeric rank into the modern rank enum value
        rank: permissionModel.rankEnum[this.rank],
        bio: this.bio,
        img: this.image,
        date: this.date,
        //Legacy toke total is preserved under its own key in the modern tokes map
        tokes: new Map([["Legacy Tokes", this.tokes]])
    });

    //Tattoo hashed IP used to migrate onto the new user account
    await newUser.tattooIPRecord(ip);

    //If the legacy profile carried an email address
    if(this.email != null && validator.isEmail(this.email)){
        //Generate new email change request
        const requestDB = await emailChangeModel.create({user: newUser._id, newEmail: this.email, ipHash: ip});

        //Send tokenized confirmation email
        //NOTE(review): intentionally not awaited? - confirm fire-and-forget delivery is desired
        mailUtils.sendAddressVerification(requestDB, newUser, this.email, false, true);
    }

    //Nuke our migration entry now that the account has been moved over
    await this.deleteOne();
}
|
|
|
|
//Compile the schema into the exported "migration" model
module.exports = mongoose.model("migration", migrationSchema);