@@ -1,7 +1,7 @@
 use std::collections::HashSet;
 use std::{fs::File, io::Read, collections::HashMap, path::Path};
-use poise::serenity_prelude::Http;
+use poise::serenity_prelude::{SerenityError, Http};
 use r2d2::{Pool, PooledConnection};
 use r2d2_sqlite::SqliteConnectionManager;
 use r2d2_sqlite::rusqlite::{self, params};
@@ -25,6 +25,12 @@ pub enum DatabaseError {
     Pool(r2d2::Error),
 }
 
+#[derive(From, Debug)]
+pub enum LoadLegacyError {
+    Database(DatabaseError),
+    Serenity(SerenityError),
+}
+
 type Result<T> = std::result::Result<T, DatabaseError>;
 
 impl Database {
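Note on the error plumbing these enums set up (a sketch, not part of the change, and it assumes the bare `#[derive(From)]` comes from the derive_more crate): `?` applies exactly one `From` conversion, so a `rusqlite::Error` or `r2d2::Error` cannot jump straight to `LoadLegacyError`; it has to be lifted into `DatabaseError` first, which is why `load_legacy` below wraps those errors manually before `?`.

    // Hypothetical helper, for illustration only: the two-hop conversion the new code relies on.
    fn two_hops(res: rusqlite::Result<usize>) -> std::result::Result<usize, LoadLegacyError> {
        // rusqlite::Error -> DatabaseError (explicit map_err) -> LoadLegacyError (derived From via `?`)
        Ok(res.map_err(DatabaseError::Rusqlite)?)
    }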
@@ -58,7 +64,8 @@ impl Database {
                 id INTEGER PRIMARY KEY,
                 name TEXT NOT NULL,
                 discriminator INTEGER NOT NULL,
-                avatar TEXT
+                avatar TEXT,
+                deleted INTEGER NOT NULL
             )",
             params![],
         )?;
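For reference, the new `deleted INTEGER NOT NULL` column is read and written as a boolean flag on the `User` model used throughout this diff. The struct itself is defined elsewhere in the crate; the shape below is only a sketch inferred from the `params!` and `query_row` calls, and the integer widths are assumptions.

    // Assumed shape of the User model backing this table (not shown in the diff):
    struct User {
        id: u64,                 // "u64 must be converted to String for templates" (see the query below)
        name: String,
        discriminator: u16,      // width assumed; stored in an INTEGER column
        avatar: Option<String>,  // nullable TEXT column, hence `avatar: None` in the code
        deleted: bool,           // new flag backed by `deleted INTEGER NOT NULL`
    }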
@@ -76,85 +83,80 @@ impl Database {
             .is_ok())
     }
 
-    pub async fn load_legacy(&self, http: &Http) -> Result<()> {
+    pub async fn load_legacy(&self, http: &Http) -> std::result::Result<(), LoadLegacyError> {
         let latest_challenge = get_challenge_number();
         // HashMap of archived users that are no longer sharing a server with 字ちゃん
         // Their historical usernames and discriminators will be used
         let mut archived_users = HashMap::new();
-        let conn = self.conn()?;
+        let conn = self.conn().map_err(|error| DatabaseError::Pool(error))?;
         for n in 1..=latest_challenge {
             println!("Loading legacy challenge {n}/{latest_challenge}...");
             let mut file = File::open(format!("data/challenges/{n}.json")).unwrap();
             let mut contents = String::new();
             file.read_to_string(&mut contents).unwrap();
-            for (legacy, submissions) in serde_json::from_str::<Vec<LegacySubmission>>(&contents)
-                .unwrap()
-                .iter()
-                .map(|legacy| (legacy, legacy.parse().unwrap())) {
-                let mut already_updated = false;
-                for submission in submissions {
-                    conn.execute(
-                        "INSERT INTO Submission(author_id, timestamp, image, challenge) VALUES (?1, ?2, ?3, ?4)",
-                        params![
-                            &submission.author_id,
-                            &submission.timestamp,
-                            &submission.image,
-                            n,
-                        ]
-                    )?;
-                    let id = submission.author_id;
-                    if !self.has_submitted(id)? {
-                        println!("Fetching user {id}...");
-                        let previously_archived = archived_users.contains_key(&id);
-                        // Parse archived user out of legacy and insert into HashMap
-                        let mut archive = || {
-                            if already_updated {
-                                return;
-                            }
-                            if previously_archived {
-                                println!("Updating archived data for user {id}");
-                            } else {
-                                println!("Adding archived data for user {id}");
-                            }
-                            let (name, discriminator) = {
-                                let mut iter = legacy.username.split('#');
-                                let name = iter.next().unwrap().to_owned();
-                                let discriminator = iter
-                                    .next()
-                                    .map(|str| str.parse().unwrap())
-                                    .unwrap_or(0);
-                                (name, discriminator)
-                            };
-                            archived_users.insert(id, User {
-                                id,
-                                name,
-                                discriminator,
-                                avatar: None,
-                            });
-                            already_updated = true;
-                        };
-                        if previously_archived {
-                            // If it already contains the archived user,
-                            // overwrite their data since they may have updated
-                            // their username/discriminator since their previous submission
-                            archive();
-                        } else {
-                            match User::fetch(http, submission.author_id).await {
-                                Ok(user) => {
-                                    conn.execute(
-                                        "INSERT INTO User(id, name, discriminator, avatar) VALUES (?1, ?2, ?3, ?4)",
-                                        params![user.id, user.name, user.discriminator, user.avatar]
-                                    )?;
-                                },
-                                Err(error) => {
-                                    println!("Failed to fetch user {}, may update archived data: {error}", submission.author_id);
-                                    archive();
-                                },
-                            };
-                        }
-                    }
-                }
+            for legacy in serde_json::from_str::<Vec<LegacySubmission>>(&contents).unwrap() {
+                let id = legacy.id;
+                if !self.has_submitted(id)? {
+                    println!("Fetching user {id}...");
+                    // If it already contains the archived user,
+                    // overwrite their data since they may have updated
+                    // their username/discriminator since their previous submission
+                    match archived_users.get(&id) {
+                        Some(User { deleted, .. }) => {
+                            archived_users.insert(id, User {
+                                id,
+                                avatar: None,
+                                deleted: *deleted,
+                                ..User::from_username(&legacy.username)
+                            });
+                        },
+                        None => match User::fetch(http, id).await {
+                            Ok(User { deleted: true, .. }) => {
+                                archived_users.insert(id, User {
+                                    id,
+                                    avatar: None,
+                                    deleted: true,
+                                    ..User::from_username(&legacy.username)
+                                });
+                            },
+                            Ok(user) => {
+                                conn.execute(
+                                    "INSERT INTO User(id, name, discriminator, avatar, deleted) VALUES (?1, ?2, ?3, ?4, ?5)",
+                                    params![user.id, user.name, user.discriminator, user.avatar, user.deleted]
+                                ).map_err(|error| DatabaseError::Rusqlite(error))?;
+                            },
+                            Err(error) if error.to_string().eq("Unknown User") => {
+                                // This will also be called in the case of an invalid user ID
+                                println!("Failed to fetch user {id}, adding to archive");
+                                archived_users.insert(id, User {
+                                    id,
+                                    avatar: None,
+                                    deleted: false,
+                                    ..User::from_username(&legacy.username)
+                                });
+                            },
+                            Err(error) => return Err(LoadLegacyError::Serenity(error)),
+                        },
+                    };
+                }
+                for submission in legacy.parse().unwrap() {
+                    conn.execute(
+                        "INSERT INTO Submission(author_id, timestamp, image, challenge) VALUES (?1, ?2, ?3, ?4)",
+                        params![
+                            &submission.author_id,
+                            &submission.timestamp,
+                            &submission.image,
+                            n,
+                        ]
+                    ).map_err(|error| DatabaseError::Rusqlite(error))?;
+                }
             }
         }
+        for (_id, user) in archived_users {
+            conn.execute(
+                "INSERT INTO User(id, name, discriminator, avatar, deleted) VALUES (?1, ?2, ?3, ?4, ?5)",
+                params![user.id, user.name, user.discriminator, user.avatar, user.deleted]
+            ).map_err(|error| DatabaseError::Rusqlite(error))?;
+        }
         Ok(())
     }
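The rewritten loop leans on a `User::from_username` constructor where the removed `archive` closure used to split the legacy `name#discriminator` string inline. That constructor is not part of this diff; the sketch below only illustrates what the call sites appear to assume (id, avatar and deleted are always overridden by struct-update syntax at those call sites).

    // Hypothetical sketch of User::from_username, mirroring the inline parsing removed above.
    impl User {
        fn from_username(username: &str) -> Self {
            let mut iter = username.split('#');
            let name = iter.next().unwrap_or_default().to_owned();
            // Legacy usernames may lack a '#discriminator' suffix; default to 0, as the old closure did.
            let discriminator = iter.next().and_then(|raw| raw.parse().ok()).unwrap_or(0);
            User { id: 0, name, discriminator, avatar: None, deleted: false }
        }
    }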
@@ -191,13 +193,14 @@ impl Database {
             .iter()
             // u64 must be converted to String for templates
             .map(|id| -> Result<(String, User)> {
-                match conn.prepare("SELECT name, discriminator, avatar FROM User WHERE id = ?1") {
+                match conn.prepare("SELECT name, discriminator, avatar, deleted FROM User WHERE id = ?1") {
                     Ok(mut statement) => statement.query_row(params![id], |row| {
                         Ok((id.to_string(), User {
                             id: *id,
                             name: row.get(0)?,
                             discriminator: row.get(1)?,
                             avatar: row.get(2)?,
+                            deleted: row.get(3)?,
                         }))
                     }).map_err(DatabaseError::Rusqlite),
                     Err(error) => Err(DatabaseError::Rusqlite(error)),
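One assumption worth spelling out: `deleted: row.get(3)?` relies on rusqlite's built-in `FromSql`/`ToSql` implementations for `bool`, which store booleans in INTEGER columns as 0/1. A minimal round-trip check, purely for illustration and using an in-memory connection:

    // Illustrative only: a bool written through params! comes back out of an INTEGER NOT NULL column.
    fn bool_roundtrip() -> rusqlite::Result<bool> {
        let conn = rusqlite::Connection::open_in_memory()?;
        conn.execute("CREATE TABLE T (deleted INTEGER NOT NULL)", params![])?;
        conn.execute("INSERT INTO T(deleted) VALUES (?1)", params![true])?;
        conn.query_row("SELECT deleted FROM T", params![], |row| row.get(0))
    }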