Better legacy migration, deleted user handling
parent be8122d356
commit a048d313bf
6 changed files with 121 additions and 73 deletions

@@ -1,7 +1,7 @@
use std::collections::HashSet;
use std::{fs::File, io::Read, collections::HashMap, path::Path};

use poise::serenity_prelude::Http;
use poise::serenity_prelude::{SerenityError, Http};
use r2d2::{Pool, PooledConnection};
use r2d2_sqlite::SqliteConnectionManager;
use r2d2_sqlite::rusqlite::{self, params};

@@ -25,6 +25,12 @@ pub enum DatabaseError {
Pool(r2d2::Error),
}

#[derive(From, Debug)]
pub enum LoadLegacyError {
Database(DatabaseError),
Serenity(SerenityError),
}

type Result<T> = std::result::Result<T, DatabaseError>;

impl Database {

@@ -58,7 +64,8 @@ impl Database {
id INTEGER PRIMARY KEY,
name TEXT NOT NULL,
discriminator INTEGER NOT NULL,
avatar TEXT
avatar TEXT,
deleted INTEGER NOT NULL
)",
params![],
)?;

@@ -76,85 +83,80 @@ impl Database {
.is_ok())
}

pub async fn load_legacy(&self, http: &Http) -> Result<()> {
pub async fn load_legacy(&self, http: &Http) -> std::result::Result<(), LoadLegacyError> {
let latest_challenge = get_challenge_number();
// HashMap of archived users that are no longer sharing a server with 字ちゃん
// Their historical usernames and discriminators will be used
let mut archived_users = HashMap::new();
let conn = self.conn()?;
let conn = self.conn().map_err(|error| DatabaseError::Pool(error))?;
for n in 1..=latest_challenge {
println!("Loading legacy challenge {n}/{latest_challenge}...");
let mut file = File::open(format!("data/challenges/{n}.json")).unwrap();
let mut contents = String::new();
file.read_to_string(&mut contents).unwrap();
for (legacy, submissions) in serde_json::from_str::<Vec<LegacySubmission>>(&contents)
.unwrap()
.iter()
.map(|legacy| (legacy, legacy.parse().unwrap())) {
let mut already_updated = false;
for submission in submissions {
conn.execute(
"INSERT INTO Submission(author_id, timestamp, image, challenge) VALUES (?1, ?2, ?3, ?4)",
params![
&submission.author_id,
&submission.timestamp,
&submission.image,
n,
]
)?;
let id = submission.author_id;
if !self.has_submitted(id)? {
println!("Fetching user {id}...");
let previously_archived = archived_users.contains_key(&id);
// Parse archived user out of legacy and insert into HashMap
let mut archive = || {
if already_updated {
return;
}
if previously_archived {
println!("Updating archived data for user {id}");
} else {
println!("Adding archived data for user {id}");
}
let (name, discriminator) = {
let mut iter = legacy.username.split('#');
let name = iter.next().unwrap().to_owned();
let discriminator = iter
.next()
.map(|str| str.parse().unwrap())
.unwrap_or(0);
(name, discriminator)
};
for legacy in serde_json::from_str::<Vec<LegacySubmission>>(&contents).unwrap() {
let id = legacy.id;
if !self.has_submitted(id)? {
println!("Fetching user {id}...");
// If it already contains the archived user,
// overwrite their data since they may have updated
// their username/discriminator since their previous submission
match archived_users.get(&id) {
Some(User { deleted, .. }) => {
archived_users.insert(id, User {
id,
avatar: None,
deleted: *deleted,
..User::from_username(&legacy.username)
});
},
None => match User::fetch(http, id).await {
Ok(User { deleted: true, .. }) => {
archived_users.insert(id, User {
id,
name,
discriminator,
avatar: None,
deleted: true,
..User::from_username(&legacy.username)
});
already_updated = true;
};
if previously_archived {
// If it already contains the archived user,
// overwrite their data since they may have updated
// their username/discriminator since their previous submission
archive();
} else {
match User::fetch(http, submission.author_id).await {
Ok(user) => {
conn.execute(
"INSERT INTO User(id, name, discriminator, avatar) VALUES (?1, ?2, ?3, ?4)",
params![user.id, user.name, user.discriminator, user.avatar]
)?;
},
Err(error) => {
println!("Failed to fetch user {}, may update archived data: {error}", submission.author_id);
archive();
},
};
}
}
}
},
Ok(user) => {
conn.execute(
"INSERT INTO User(id, name, discriminator, avatar, deleted) VALUES (?1, ?2, ?3, ?4, ?5)",
params![user.id, user.name, user.discriminator, user.avatar, user.deleted]
).map_err(|error| DatabaseError::Rusqlite(error))?;
},
Err(error) if error.to_string().eq("Unknown User") => {
// This will also be called in the case of an invalid user ID
println!("Failed to fetch user {id}, adding to archive");
archived_users.insert(id, User {
id,
avatar: None,
deleted: false,
..User::from_username(&legacy.username)
});
},
Err(error) => return Err(LoadLegacyError::Serenity(error)),
},
};
}
for submission in legacy.parse().unwrap() {
conn.execute(
"INSERT INTO Submission(author_id, timestamp, image, challenge) VALUES (?1, ?2, ?3, ?4)",
params![
&submission.author_id,
&submission.timestamp,
&submission.image,
n,
]
).map_err(|error| DatabaseError::Rusqlite(error))?;
}
}
}
for (_id, user) in archived_users {
conn.execute(
"INSERT INTO USER (id, name, discriminator, avatar, deleted) VALUES (?1, ?2, ?3, ?4, ?5)",
params![user.id, user.name, user.discriminator, user.avatar, user.deleted]
).map_err(|error| DatabaseError::Rusqlite(error))?;
}
Ok(())
}

@@ -191,13 +193,14 @@ impl Database {
.iter()
// u64 must be converted to String for templates
.map(|id| -> Result<(String, User)> {
match conn.prepare("SELECT name, discriminator, avatar FROM User WHERE id = ?1") {
match conn.prepare("SELECT name, discriminator, avatar, deleted FROM User WHERE id = ?1") {
Ok(mut statement) => statement.query_row(params![id], |row| {
Ok((id.to_string(), User {
id: *id,
name: row.get(0)?,
discriminator: row.get(1)?,
avatar: row.get(2)?,
deleted: row.get(3)?,
}))
}).map_err(DatabaseError::Rusqlite),
Err(error) => Err(DatabaseError::Rusqlite(error)),

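Note on the error plumbing above: load_legacy now returns LoadLegacyError, and the derive_more From impls on that enum let a DatabaseError produced by map_err be lifted with the ? operator, while a SerenityError is wrapped explicitly. A minimal sketch of that conversion chain, assuming the crate's DatabaseError and LoadLegacyError types (the helper name is invented for illustration):

fn lift(result: rusqlite::Result<usize>) -> std::result::Result<usize, LoadLegacyError> {
    // map_err yields DatabaseError::Rusqlite(..); the trailing `?` then converts
    // that DatabaseError into LoadLegacyError::Database via the derived From impl.
    Ok(result.map_err(DatabaseError::Rusqlite)?)
}
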
@@ -6,11 +6,20 @@ use super::Submission;

#[derive(Deserialize)]
pub struct LegacySubmission {
pub id: String,
#[serde(deserialize_with = "deserialize_id")]
pub id: u64,
pub images: Vec<String>,
pub username: String,
}

pub fn deserialize_id<'de, D>(deserializer: D) -> Result<u64, D::Error>
where
D: serde::Deserializer<'de>,
{
let id_str: &str = serde::Deserialize::deserialize(deserializer)?;
id_str.parse().map_err(serde::de::Error::custom)
}

#[derive(From, Debug)]
pub enum LegacySubmissionParseError {
BadAuthorId(std::num::ParseIntError),

@@ -18,7 +27,7 @@ pub enum LegacySubmissionParseError {

impl LegacySubmission {
pub fn parse(&self) -> Result<Vec<Submission>, LegacySubmissionParseError> {
let author_id = self.id.parse()?;
let author_id = self.id;
Ok(self.images
.iter()
.map(|image| {

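The legacy id field changes from String to u64, with deserialize_id parsing the string form still present in the archived JSON. A rough usage sketch, assuming the struct has only the fields shown above (the JSON values are invented for illustration):

let json = r#"{"id": "67795786229878784", "images": ["1.png"], "username": "someone#1234"}"#;
let legacy: LegacySubmission = serde_json::from_str(json).unwrap();
assert_eq!(legacy.id, 67795786229878784);
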
@@ -6,6 +6,7 @@ mod tests;
use chrono::Utc;
use derive_more::From;
use poise::serenity_prelude::{self, UserId, Http};
use regex::Regex;
use reqwest::StatusCode;
use rocket::http::{Cookie, CookieJar};
use serial::*;

@@ -27,6 +28,7 @@ pub struct User {
#[serde(deserialize_with = "deserialize_discriminator")]
pub discriminator: u16,
pub avatar: Option<String>,
pub deleted: bool,
}

impl Username for User {

@@ -38,12 +40,36 @@ impl Username for User {
}
}

fn is_name_deleted(name: &str) -> bool {
Regex::new(r"Deleted User [a-f0-9]{8}").unwrap().is_match(name)
}

impl User {
pub fn from_username(username: &str) -> Self {
let (name, discriminator) = {
let mut iter = username.split('#');
let name = iter.next().unwrap().to_owned();
let discriminator = iter
.next()
.map(|str| str.parse().unwrap())
.unwrap_or(0);
(name, discriminator)
};
Self {
id: 0,
name,
discriminator,
avatar: None,
deleted: false,
}
}

pub async fn fetch(http: &Http, id: u64) -> serenity_prelude::Result<Self> {
let user = UserId(id).to_user(http).await?;
Ok(Self {
id,
avatar: user.avatar,
deleted: is_name_deleted(&user.name),
name: user.name,
discriminator: user.discriminator,
})

@@ -51,7 +77,6 @@ impl User {

fn avatar(&self) -> String {
match &self.avatar {
// https://cdn.discordapp.com/avatars/67795786229878784/e58524afe21b5058fc6f3cdc19aea8e1.webp?size=1024
Some(avatar) => format!(
"https://cdn.discordapp.com/avatars/{}/{}.{}?size=1024",
self.id,

@@ -118,6 +143,7 @@ impl SessionUser {
name: parse_cookie_value(cookies, USER_NAME_COOKIE)?,
discriminator: parse_cookie_value(cookies, USER_DISCRIMINATOR_COOKIE)?,
avatar: Some(parse_cookie_value(cookies, USER_AVATAR_COOKIE)?),
deleted: false,
}))
}

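from_username backfills archived users from the legacy "name#discriminator" strings, and is_name_deleted flags Discord's placeholder names. A short sketch of the expected behaviour, assuming the crate's User type (the example values are invented):

let user = User { id: 67795786229878784, ..User::from_username("someone#1234") };
assert_eq!((user.name.as_str(), user.discriminator), ("someone", 1234));
assert!(!user.deleted); // archived users default to not deleted

let no_tag = User::from_username("newhandle");
assert_eq!(no_tag.discriminator, 0); // missing '#…' part falls back to 0
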
@@ -23,11 +23,12 @@ impl Serialize for User {
where
S: Serializer,
{
let mut state = serializer.serialize_struct("User", 5)?;
let mut state = serializer.serialize_struct("User", 6)?;
state.serialize_field("id", &self.id)?;
state.serialize_field("name", &self.name)?;
state.serialize_field("discriminator", &self.discriminator)?;
state.serialize_field("avatar", &self.avatar())?;
state.serialize_field("deleted", &self.deleted)?;
state.serialize_field("username", &self.username())?;
state.end()
}

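The manual Serialize impl now writes six fields: id, name, discriminator, the expanded avatar URL, the new deleted flag, and the computed username. A quick check of the output shape, assuming the crate's User type (values invented):

let user = User { id: 67795786229878784, ..User::from_username("someone#1234") };
let json = serde_json::to_string(&user).unwrap();
// json carries "id", "name", "discriminator", "avatar", "deleted" and "username" keys
assert!(json.contains("\"deleted\":false"));
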
@@ -19,3 +19,12 @@ fn test_new_username() {
let user = test_user("test", 0);
assert_eq!(user.username(), "test");
}

#[test]
fn is_name_deleted() {
use super::is_name_deleted;
assert!(is_name_deleted("Deleted User ce34a7da"));
assert!(!is_name_deleted("Deleted User Ce34a7da")); // capital letter in hex
assert!(!is_name_deleted("Deleted User ce34a7d")); // hex too short
assert!(!is_name_deleted("Deleted User")); // no hex
}

@@ -142,7 +142,7 @@
{% set author = users[submission.author_id] %}
<figure>
<img src="/{{ challenge }}/{{ submission.image }}" alt="{{ author.username }}'s submission" onclick="submissionModal('{{ submission.image }}')">
<figcaption><a href="https://discord.com/users/{{ author.id }}" target="_blank">{{ author.username }}</a></figcaption>
<figcaption>{% if not author.deleted %}<a href="https://discord.com/users/{{ author.id }}" target="_blank">{% endif %}{{ author.username }}{% if author.deleted %} (deleted account){% else %}</a>{% endif %}</figcaption>
</figure>
{% endfor %}
</div>