Added profile toke count ingestion to migration schema.

This commit is contained in:
rainbow napkin 2025-10-10 08:42:02 -04:00
parent ad0dd6bdbb
commit bb2a1369a3

View file

@ -61,10 +61,9 @@ const migrationSchema = new mongoose.Schema({
required: true
},
tokes: {
type: mongoose.SchemaTypes.Map,
default: new Map(),
required: true
},
type: mongoose.SchemaTypes.Number,
default: 0,
}
});
//TODO: before next commit, add error checking to the ingestLegacy statics down below
@ -82,14 +81,22 @@ migrationSchema.statics.ingestLegacyDump = async function(){
return;
}
//Crash directory
//Migration directories/file
const dir = "./migration/"
const userDump = `${dir}users.sql`
const tokeDir = `./migration/tokebot/`
//Create array to hold list of toke dump files
let tokeDumps = [];
//Double check migration files
try{
//Pull dump stats
await fs.stat(userDump);
//Pull toke related files
tokeDumps = await fs.readdir(tokeDir)
//If we caught an error (most likely it's missing)
}catch(err){
loggerUtils.consoleWarn("No migration files detected! Pleas provide legacy migration files or disable migration from config.json!");
@ -106,8 +113,34 @@ migrationSchema.statics.ingestLegacyDump = async function(){
//For each line in the user dump
for(const line of splitDump){
//Ingest the legacy user profile
this.ingestLegacyUser(line);
//Waiting on this is a lot less efficient...
//But I'm too lazy to write a while loop that waits on every promise to return gracefully to make something that will run like once perform better.
await this.ingestLegacyUser(line);
}
//Create arrays to hold toke dumps contents
const tokeMaps = [];
const tokeLogs = [];
//For every toke related file
for(const file of tokeDumps){
//Read toke related file
const rawContents = await fs.readFile(`${tokeDir}${file}`, 'binary');
//If its a toke file containing a list of toke counts per profile
if(file.match(/\_tokefile/) != null){
//Push raw toke map into toke maps array
tokeMaps.push(rawContents);
//If its a toke log containing a list of tokes
}else if(file.match(/\_toke\.log/)){
//Push file contents into toke log array
tokeLogs.push(rawContents);
}
}
//Ingest toke maps
this.ingestTokeMaps(tokeMaps);
}catch(err){
return loggerUtils.localExceptionHandler(err);
}
@ -168,7 +201,7 @@ migrationSchema.statics.ingestLegacyUser = async function(rawProfile){
//If we found the user in the database
if(foundMigration != null || foundUser != null){
//Scream
loggerUtils.consoleWarn(`Found legacy user ${profileArray[1]} in database, skipping migration!`);
//loggerUtils.consoleWarn(`Found legacy user ${profileArray[1]} in database, skipping migration!`);
//BAIL!
return;
}
@ -204,4 +237,52 @@ migrationSchema.statics.ingestLegacyUser = async function(rawProfile){
}
}
/**
 * Ingests array of raw toke map data ripped from the migrations folder and injects it on-top of the existing migration profile collection in the DB
 * @param {Array} rawTokeMaps - List of raw content ripped from legacy cytube/fore.st toke files
 * @returns {Promise<void>} Resolves once every migrated profile has its summed toke total written
 */
migrationSchema.statics.ingestTokeMaps = async function(rawTokeMaps){
    try{
        //If server migration is disabled
        if(!config.migrate){
            //BAIL!!
            return;
        }
        //Map of user -> toke count summed across every dump file
        const tokeMap = new Map();
        //For each raw map handed to us by the main ingestion method
        for(const rawMap of rawTokeMaps){
            //Parse map into dehydrated map array
            //NOTE(review): assumes the serialized shape is { value: [[user, count], ...] } — confirm against the legacy tokebot dump format
            const dehydratedMap = JSON.parse(rawMap);
            //We don't need to re-hydrate a map we're just going to iterate through like an array...
            for(const [user, count] of dehydratedMap.value){
                //Accumulate; a user not yet counted defaults to 0 (0 + count === count, so no special-case branch needed)
                tokeMap.set(user, (tokeMap.get(user) ?? 0) + count);
            }
        }
        //For each toking user
        for(const [user, total] of tokeMap){
            //Update migration profile to include total tokes
            await this.updateOne({user: user},{$set:{tokes: total}});
            //TODO: route through loggerUtils for consistency with the rest of this file's logging
            console.log(`${total} tokes injected into user profile ${user}!`);
        }
    }catch(err){
        return loggerUtils.localExceptionHandler(err);
    }
}
module.exports = mongoose.model("migration", migrationSchema);