use std::collections::HashSet;
use std::{fs::File, io::Read, collections::HashMap, path::Path};
use poise::serenity_prelude::Http;
use r2d2::{Pool, PooledConnection};
use r2d2_sqlite::SqliteConnectionManager;
use r2d2_sqlite::rusqlite::{self, params};
use derive_more::From;
use crate::{utils::get_challenge_number, models::User};
use super::{LegacySubmission, Submission};
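/// SQLite-backed storage for challenge submissions and the users who made them,
/// accessed through an r2d2 connection pool.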
pub struct Database {
    // A connection pool is used because rusqlite::Connection contains RefCell,
    // which cannot be shared between threads safely
    connection_pool: Pool<SqliteConnectionManager>,
}
const DATABASE_FILENAME: &str = "database.db";
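/// Errors that Database methods can return; the derive_more::From impls let
/// `?` convert rusqlite and r2d2 errors into this type automatically.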
#[derive(From, Debug)]
pub enum DatabaseError {
    Rusqlite(rusqlite::Error),
    Pool(r2d2::Error),
}
type Result<T> = std::result::Result<T, DatabaseError>;
impl Database {
    pub fn file_exists() -> bool {
        Path::new(DATABASE_FILENAME).exists()
    }
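    /// Opens the database and creates the Submission and User tables if they
    /// do not already exist. When `testing` is true an in-memory database is
    /// used instead of DATABASE_FILENAME.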
    pub fn new(testing: bool) -> Result<Self> {
        let connection_manager = if testing {
            SqliteConnectionManager::memory()
        } else {
            SqliteConnectionManager::file(DATABASE_FILENAME)
        };
        let connection_pool = Pool::new(connection_manager)?;
        let conn = connection_pool.get()?;
        conn.execute(
            "CREATE TABLE IF NOT EXISTS Submission (
                id INTEGER PRIMARY KEY,
                author_id INTEGER NOT NULL,
                timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
                image TEXT NOT NULL,
                challenge INTEGER NOT NULL,
                FOREIGN KEY (author_id) REFERENCES User(id)
            )",
            params![],
        )?;
        conn.execute(
            "CREATE TABLE IF NOT EXISTS User (
                id INTEGER PRIMARY KEY,
                name TEXT NOT NULL,
                discriminator INTEGER NOT NULL,
                avatar TEXT
            )",
            params![],
        )?;
        Ok(Self { connection_pool })
    }
    fn conn(&self) -> std::result::Result<PooledConnection<SqliteConnectionManager>, r2d2::Error> {
        self.connection_pool.get()
    }
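    /// Returns whether a row for `user_id` already exists in the User table.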
    pub fn has_submitted(&self, user_id: u64) -> Result<bool> {
        Ok(self.conn()?
            .prepare("SELECT 1 FROM User WHERE id = ?1 LIMIT 1")?
            .exists(params![user_id])?)
    }
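    /// Imports the legacy JSON archives under data/challenges/ into the
    /// database, fetching each author over the Discord HTTP API; authors that
    /// can no longer be fetched are collected from their archived usernames.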
    pub async fn load_legacy(&self, http: &Http) -> Result<()> {
        let latest_challenge = get_challenge_number();
        // HashMap of archived users that no longer share a server with 字ちゃん.
        // Their historical usernames and discriminators will be used instead.
        let mut archived_users = HashMap::new();
        let conn = self.conn()?;
        for n in 1..=latest_challenge {
            println!("Loading legacy challenge {n}/{latest_challenge}...");
            let mut file = File::open(format!("data/challenges/{n}.json")).unwrap();
            let mut contents = String::new();
            file.read_to_string(&mut contents).unwrap();
            for (legacy, submissions) in serde_json::from_str::<Vec<LegacySubmission>>(&contents)
                .unwrap()
                .iter()
                .map(|legacy| (legacy, legacy.parse().unwrap()))
            {
                let mut already_updated = false;
                for submission in submissions {
                    conn.execute(
                        "INSERT INTO Submission(author_id, timestamp, image, challenge) VALUES (?1, ?2, ?3, ?4)",
                        params![
                            &submission.author_id,
                            &submission.timestamp,
                            &submission.image,
                            n,
                        ],
                    )?;
                    let id = submission.author_id;
                    if !self.has_submitted(id)? {
                        println!("Fetching user {id}...");
                        let previously_archived = archived_users.contains_key(&id);
                        // Parse the archived user out of the legacy record and insert it into the HashMap
                        let mut archive = || {
                            if already_updated {
                                return;
                            }
                            if previously_archived {
                                println!("Updating archived data for user {id}");
                            } else {
                                println!("Adding archived data for user {id}");
                            }
                            let (name, discriminator) = {
                                let mut iter = legacy.username.split('#');
                                let name = iter.next().unwrap().to_owned();
                                let discriminator = iter
                                    .next()
                                    .map(|str| str.parse().unwrap())
                                    .unwrap_or(0);
                                (name, discriminator)
                            };
                            archived_users.insert(id, User {
                                id,
                                name,
                                discriminator,
                                avatar: None,
                            });
                            already_updated = true;
                        };
                        if previously_archived {
                            // If the HashMap already contains the archived user,
                            // overwrite their data, since they may have changed their
                            // username/discriminator since their previous submission
                            archive();
                        } else {
                            match User::fetch(http, submission.author_id).await {
                                Ok(user) => {
                                    conn.execute(
                                        "INSERT INTO User(id, name, discriminator, avatar) VALUES (?1, ?2, ?3, ?4)",
                                        params![user.id, user.name, user.discriminator, user.avatar],
                                    )?;
                                },
                                Err(error) => {
                                    println!("Failed to fetch user {}, may update archived data: {error}", submission.author_id);
                                    archive();
                                },
                            };
                        }
                    }
                }
            }
        }
        Ok(())
    }
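    /// Returns all submissions for `challenge` together with their authors,
    /// keyed by the author's id rendered as a String.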
    pub fn get_challenge_user_data(&self, challenge: u32) -> Result<(Vec<Submission>, HashMap<String, User>)> {
        let submissions = self.get_submissions(challenge)?;
        let users = self.get_users({
            let mut user_ids = HashSet::new();
            for submission in &submissions {
                user_ids.insert(submission.author_id);
            }
            user_ids
        })?;
        Ok((submissions, users))
    }
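    /// Returns every submission recorded for the given challenge number.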
    pub fn get_submissions(&self, challenge: u32) -> Result<Vec<Submission>> {
        Ok(self.conn()?
            .prepare("SELECT author_id, timestamp, image FROM Submission WHERE challenge = ?1")?
            .query_map(params![challenge], |row| {
                Ok(Submission {
                    author_id: row.get(0)?,
                    timestamp: row.get(1)?,
                    image: row.get(2)?,
                })
            })?
            .collect::<std::result::Result<Vec<Submission>, rusqlite::Error>>()?)
    }
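    /// Looks up the given user ids in the User table and returns them keyed by
    /// their id as a String.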
    fn get_users(&self, users: HashSet<u64>) -> Result<HashMap<String, User>> {
        let conn = self.conn()?;
        users
            .iter()
            // u64 must be converted to String for templates
            .map(|id| -> Result<(String, User)> {
                // `?` relies on the From impls derived on DatabaseError
                // to convert rusqlite errors
                let user = conn
                    .prepare("SELECT name, discriminator, avatar FROM User WHERE id = ?1")?
                    .query_row(params![id], |row| {
                        Ok(User {
                            id: *id,
                            name: row.get(0)?,
                            discriminator: row.get(1)?,
                            avatar: row.get(2)?,
                        })
                    })?;
                Ok((id.to_string(), user))
            })
            .collect()
    }
    #[allow(dead_code)]
    pub fn refresh_users(&self) -> rusqlite::Result<()> {
        // Refresh all changeable user data (name, discriminator, avatar).
        // Ideally this should run periodically.
        todo!()
    }
    #[allow(dead_code, unused_variables)]
    pub fn insert_submission(&self, submission: &Submission) -> rusqlite::Result<()> {
        // For new submissions only (legacy submissions are bulk-inserted by load_legacy)
        todo!()
    }
}
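
// A minimal smoke-test sketch (not part of the original module): it only checks
// that the in-memory database initialises and creates its tables. Note that
// SqliteConnectionManager::memory() opens a separate in-memory database per
// pooled connection, so tests should not assume state is shared across
// connections obtained from the pool.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn creates_tables_in_memory() {
        Database::new(true).expect("in-memory database should initialise");
    }
}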