diff --git a/.gitignore b/.gitignore
index 6985cf1bd..5eb4ac551 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,6 @@ Cargo.lock
 
 # MSVC Windows builds of rustc generate these, which store debugging information
 *.pdb
+
+# Database file
+*.sqlite3
\ No newline at end of file
diff --git a/Cargo.toml b/Cargo.toml
index 13619ee7f..3195c015c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -26,6 +26,8 @@ rusqlite_migration = { version = "1.0" }
 rand = { version="0.8.5" }
 serde = {version="1.0", features = ["derive"]}
 serde_json = { version = "1.0", features = ["raw_value"] }
+diesel = { version = "2.1.4", features = ["sqlite", "returning_clauses_for_sqlite_3_35"] }
+diesel_migrations = "2.1.0"
 
 [dev-dependencies]
 uuid = { version = "1.6.1", features = ["serde", "v4"] }
diff --git a/diesel.toml b/diesel.toml
new file mode 100644
index 000000000..7c986a1d6
--- /dev/null
+++ b/diesel.toml
@@ -0,0 +1,9 @@
+# For documentation on how to configure this file,
+# see https://diesel.rs/guides/configuring-diesel-cli
+
+[print_schema]
+file = "src/store/schema.rs"
+custom_type_derives = ["diesel::query_builder::QueryId"]
+
+[migrations_directory]
+dir = "src/store/migrations"
diff --git a/src/cli/account.rs b/src/cli/account.rs
index d1ec5bdcd..34c8a24f2 100644
--- a/src/cli/account.rs
+++ b/src/cli/account.rs
@@ -72,7 +72,7 @@ impl AccountCmd {
 // LIST ACCOUNTS
 // ================================================================================================
 
-fn list_accounts(client: Client) -> Result<(), String> {
+fn list_accounts(mut client: Client) -> Result<(), String> {
     println!("{}", "-".repeat(240));
     println!(
         "{0: <18} | {1: <66} | {2: <66} | {3: <66} | {4: <15}",
@@ -100,7 +100,7 @@ fn list_accounts(client: Client) -> Result<(), String> {
 // ================================================================================================
 
 fn new_account(
-    client: Client,
+    mut client: Client,
     template: &Option<AccountTemplate>,
     deploy: bool,
 ) -> Result<(), String> {
@@ -151,20 +151,9 @@ fn new_account(
     }
     .map_err(|err| err.to_string())?;
 
-    // TODO: Make these inserts atomic through a single transaction
     client
-        .store()
-        .insert_account_code(account.code())
-        .and_then(|_| client.store().insert_account_storage(account.storage()))
-        .and_then(|_| client.store().insert_account_vault(account.vault()))
-        .and_then(|_| client.store().insert_account(&account))
-        .map(|_| {
-            println!(
-                "Succesfully created and stored Account ID: {}",
-                account.id()
-            )
-        })
-        .map_err(|x| x.to_string())?;
+        .insert_account_with_metadata(&account)
+        .map_err(|err| err.to_string())?;
 
     Ok(())
 }
diff --git a/src/errors.rs b/src/errors.rs
index 53fcd0c0c..e91643219 100644
--- a/src/errors.rs
+++ b/src/errors.rs
@@ -36,7 +36,7 @@ pub enum StoreError {
     ConnectionError(rusqlite::Error),
     MigrationError(rusqlite_migration::Error),
     ColumnParsingError(rusqlite::Error),
-    QueryError(rusqlite::Error),
+    QueryError(diesel::result::Error),
     InputSerializationError(serde_json::Error),
     DataDeserializationError(serde_json::Error),
     InputNoteNotFound(Digest),
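Since `StoreError::QueryError` now wraps `diesel::result::Error`, a `From` conversion would let the Diesel-based store code bubble query errors up with `?` instead of repeating `.map_err(StoreError::QueryError)`. This is not part of the patch — only a sketch of one way to wire it up:

```rust
use crate::errors::StoreError;

// Sketch only: convert Diesel errors into the client's StoreError so that
// query call sites can use `?` directly.
impl From<diesel::result::Error> for StoreError {
    fn from(err: diesel::result::Error) -> Self {
        StoreError::QueryError(err)
    }
}
```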
diff --git a/src/lib.rs b/src/lib.rs
index 741599586..5be64c967 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -11,6 +11,10 @@ use store::Store;
 pub mod errors;
 use errors::ClientError;
+// from diesel guide, eval keeping this here
+// pub mod models; // can we move this to /store ?
+// pub mod schema; // can we move this to /store ?
+
 
 // MIDEN CLIENT
 // ================================================================================================
 
@@ -51,13 +55,23 @@ impl Client {
         &self.store
     }
 
+    // ACCOUNT INSERTION
+    // --------------------------------------------------------------------------------------------
+
+    /// Inserts a new account into the client's store.
+    pub fn insert_account_with_metadata(&mut self, account: &Account) -> Result<(), ClientError> {
+        self.store
+            .insert_account_with_metadata(account)
+            .map_err(ClientError::StoreError)
+    }
+
     // ACCOUNT DATA RETRIEVAL
     // --------------------------------------------------------------------------------------------
 
     /// Returns summary info about the accounts managed by this client.
     ///
     /// TODO: replace `AccountStub` with a more relevant structure.
-    pub fn get_accounts(&self) -> Result<Vec<AccountStub>, ClientError> {
+    pub fn get_accounts(&mut self) -> Result<Vec<AccountStub>, ClientError> {
         self.store.get_accounts().map_err(|err| err.into())
     }
 
@@ -180,6 +194,22 @@ mod tests {
         account::MockAccountType, notes::AssetPreservationStatus, transaction::mock_inputs,
     };
+    #[test]
+    /// This test is only to ensure that the database is created correctly.
+    fn test_get_accounts() {
+        // generate test store path
+        let store_path = create_test_store_path();
+
+        // generate test client
+        let mut client = super::Client::new(super::ClientConfig::new(
+            store_path.into_os_string().into_string().unwrap(),
+            super::Endpoint::default(),
+        ))
+        .unwrap();
+
+        client.get_accounts().unwrap();
+    }
+
 
     #[test]
     fn test_input_notes_round_trip() {
         // generate test store path
diff --git a/src/store/diesel_guide.md b/src/store/diesel_guide.md
new file mode 100644
index 000000000..af204e953
--- /dev/null
+++ b/src/store/diesel_guide.md
@@ -0,0 +1,104 @@
+# Diesel Setup Instructions
+[source](https://diesel.rs/guides/getting-started.html)
+
+**Note:** this guide only needs to be followed if the `diesel_migrations` crate is not used, which is **not** the case for this project. It is kept only as a reference for setting up Diesel with static SQL files.
+
+## Install libsqlite3:
+  - Ubuntu: `sudo apt install libsqlite3-dev`
+  - macOS: preinstalled in modern versions.
+
+## Install Diesel CLI:
+`cargo install diesel_cli --no-default-features --features sqlite`
+
+## Database creation:
+`diesel setup --database-url=store.sqlite3`
+
+This will create a `store.sqlite3` file in the root of the project.
+
+## Modify `diesel.toml`:
+```toml
+# For documentation on how to configure this file,
+# see https://diesel.rs/guides/configuring-diesel-cli
+
+[print_schema]
+file = "src/store/schema.rs"
+custom_type_derives = ["diesel::query_builder::QueryId"]
+
+[migrations_directory]
+dir = "src/store/migrations"
+```
+
+## Generate migration file:
+`diesel migration generate miden_client_store --database-url=store.sqlite3`
+
+## Modify `src/store/migrations/TIMESTAMP_miden_client_store` file:
+
+```sql
+-- Create account_code table
+CREATE TABLE account_code (
+    root BLOB NOT NULL, -- root of the Merkle tree for all exported procedures in account module.
+    procedures BLOB NOT NULL, -- serialized procedure digests for the account code.
+    module BLOB NOT NULL, -- serialized ModuleAst for the account code.
+    PRIMARY KEY (root)
+);
+
+-- Create account_storage table
+CREATE TABLE account_storage (
+    root BLOB NOT NULL, -- root of the account storage Merkle tree.
+    slots BLOB NOT NULL, -- serialized key-value pair of non-empty account slots.
+    PRIMARY KEY (root)
+);
+
+-- Create account_vaults table
+CREATE TABLE account_vaults (
+    root BLOB NOT NULL, -- root of the Merkle tree for the account vault.
+    assets BLOB NOT NULL, -- serialized account vault assets.
+    PRIMARY KEY (root)
+);
+
+-- Create account_keys table
+CREATE TABLE account_keys (
+    account_id UNSIGNED BIG INT NOT NULL, -- ID of the account
+    key_pair BLOB NOT NULL, -- key pair
+    PRIMARY KEY (account_id),
+    FOREIGN KEY (account_id) REFERENCES accounts(id)
+);
+
+-- Create accounts table
+CREATE TABLE accounts (
+    id UNSIGNED BIG INT NOT NULL, -- account ID.
+    code_root BLOB NOT NULL, -- root of the account_code Merkle tree.
+    storage_root BLOB NOT NULL, -- root of the account_storage Merkle tree.
+    vault_root BLOB NOT NULL, -- root of the account_vault Merkle tree.
+    nonce BIGINT NOT NULL, -- account nonce.
+    committed BOOLEAN NOT NULL, -- true if recorded, false if not.
+    PRIMARY KEY (id),
+    FOREIGN KEY (code_root) REFERENCES account_code(root),
+    FOREIGN KEY (storage_root) REFERENCES account_storage(root),
+    FOREIGN KEY (vault_root) REFERENCES account_vaults(root)
+);
+
+-- Create input notes table
+CREATE TABLE input_notes (
+    hash BLOB NOT NULL, -- the note hash
+    nullifier BLOB NOT NULL, -- the nullifier of the note
+    script BLOB NOT NULL, -- the serialized NoteScript, including script hash and ProgramAst
+    vault BLOB NOT NULL, -- the serialized NoteVault, including vault hash and list of assets
+    inputs BLOB NOT NULL, -- the serialized NoteInputs, including inputs hash and list of inputs
+    serial_num BLOB NOT NULL, -- the note serial number
+    sender_id UNSIGNED BIG INT NOT NULL, -- the account ID of the sender
+    tag UNSIGNED BIG INT NOT NULL, -- the note tag
+    num_assets UNSIGNED BIG INT NOT NULL, -- the number of assets in the note
+    inclusion_proof BLOB NOT NULL, -- the inclusion proof of the note against a block number
+    recipients BLOB NOT NULL, -- a list of account IDs of accounts which can consume this note
+    status TEXT CHECK( status IN ('pending', 'committed')), -- the status of the note - either pending or committed
+    commit_height UNSIGNED BIG INT NOT NULL, -- the block number at which the note was included into the chain
+    PRIMARY KEY (hash)
+);
+```
+
+## Apply migration:
+`diesel migration run --database-url=store.sqlite3`
+
+## Notice
+If you followed this guide, some parts of the code will need to be modified to set the default database URL to `store.sqlite3`.
\ No newline at end of file
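The guide above documents the manual Diesel CLI workflow; the patch itself does not rely on it at runtime. Instead, it embeds the SQL migrations in the binary and applies them when the store is opened (see `src/store/mod.rs` below). A minimal sketch of that pattern, with an illustrative database path:

```rust
use diesel::sqlite::SqliteConnection;
use diesel::Connection;
use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};

// Compile the SQL files under src/store/migrations into the binary.
const MIGRATIONS: EmbeddedMigrations = embed_migrations!("./src/store/migrations");

fn open_store(database_url: &str) -> SqliteConnection {
    let mut conn = SqliteConnection::establish(database_url).expect("failed to open database");
    // Apply any migrations that have not yet been run against this database.
    conn.run_pending_migrations(MIGRATIONS)
        .expect("failed to run pending migrations");
    conn
}

fn main() {
    // `store.sqlite3` is just an example path; the client reads it from its config.
    let _db = open_store("store.sqlite3");
}
```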
diff --git a/src/store/migrations.rs b/src/store/migrations.rs
deleted file mode 100644
index 52ce8da99..000000000
--- a/src/store/migrations.rs
+++ /dev/null
@@ -1,21 +0,0 @@
-use super::StoreError;
-use lazy_static::lazy_static;
-use rusqlite::Connection;
-use rusqlite_migration::{Migrations, M};
-
-// MIGRATIONS
-// ================================================================================================
-
-lazy_static! {
-    static ref MIGRATIONS: Migrations<'static> =
-        Migrations::new(vec![M::up(include_str!("store.sql")),]);
-}
-
-// PUBLIC FUNCTIONS
-// ================================================================================================
-
-pub fn update_to_latest(conn: &mut Connection) -> Result<(), StoreError> {
-    MIGRATIONS
-        .to_latest(conn)
-        .map_err(StoreError::MigrationError)
-}
diff --git a/src/store/migrations/miden_client_store/down.sql b/src/store/migrations/miden_client_store/down.sql
new file mode 100644
index 000000000..d9a93fe9a
--- /dev/null
+++ b/src/store/migrations/miden_client_store/down.sql
@@ -0,0 +1 @@
+-- This file should undo anything in `up.sql`
diff --git a/src/store/store.sql b/src/store/migrations/miden_client_store/up.sql
similarity index 100%
rename from src/store/store.sql
rename to src/store/migrations/miden_client_store/up.sql
diff --git a/src/store/mod.rs b/src/store/mod.rs
index 4b6aa47d0..a9a473ee8 100644
--- a/src/store/mod.rs
+++ b/src/store/mod.rs
@@ -1,3 +1,6 @@
+mod models;
+pub mod schema;
+
 use super::{errors::StoreError, AccountStub, ClientConfig};
 use crypto::{utils::collections::BTreeMap, Word};
 use objects::{
@@ -7,9 +10,17 @@ use objects::{
     notes::{Note, NoteMetadata, RecordedNote},
     Digest, Felt,
 };
-use rusqlite::{params, Connection};
+// use rusqlite::{params, Connection};
+use rusqlite::params;
+
+// from diesel guide, move to better place
+use diesel::prelude::*;
+use diesel::{sqlite::SqliteConnection, Connection};
+use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
+use miden_lib::*;
+use models::*;
 
-mod migrations;
+const MIGRATIONS: EmbeddedMigrations = embed_migrations!("./src/store/migrations");
 
 // TYPES
 // ================================================================================================
@@ -36,7 +47,7 @@ type SerializedInputNoteParts = (String, String, String, String, u64, u64, u64, String);
 // ================================================================================================
 
 pub struct Store {
-    db: Connection,
+    db: SqliteConnection,
 }
 
 impl Store {
@@ -45,8 +56,8 @@
 
     /// Returns a new instance of [Store] instantiated with the specified configuration options.
     pub fn new(config: StoreConfig) -> Result<Self, StoreError> {
-        let mut db = Connection::open(config.path).map_err(StoreError::ConnectionError)?;
-        migrations::update_to_latest(&mut db)?;
+        let mut db: SqliteConnection = SqliteConnection::establish(&config.path).unwrap(); // TODO: handle error
+        db.run_pending_migrations(MIGRATIONS).unwrap();
 
         Ok(Self { db })
     }
@@ -54,115 +65,72 @@
 
     // ACCOUNTS
     // --------------------------------------------------------------------------------------------
 
-    pub fn get_accounts(&self) -> Result<Vec<AccountStub>, StoreError> {
-        let mut stmt = self
-            .db
-            .prepare("SELECT id, nonce, vault_root, storage_root, code_root FROM accounts")
-            .map_err(StoreError::QueryError)?;
+    pub fn get_accounts(&mut self) -> Result<Vec<AccountStub>, StoreError> {
+        use schema::accounts::dsl::*;
 
-        let mut rows = stmt.query([]).map_err(StoreError::QueryError)?;
-        let mut result = Vec::new();
-        while let Some(row) = rows.next().map_err(StoreError::QueryError)? {
-            // TODO: implement proper error handling and conversions
-
-            let id: i64 = row.get(0).map_err(StoreError::QueryError)?;
-            let nonce: i64 = row.get(1).map_err(StoreError::QueryError)?;
-
-            let vault_root: String = row.get(2).map_err(StoreError::QueryError)?;
-            let storage_root: String = row.get(3).map_err(StoreError::QueryError)?;
-            let code_root: String = row.get(4).map_err(StoreError::QueryError)?;
-
-            result.push(AccountStub::new(
-                (id as u64)
-                    .try_into()
-                    .expect("Conversion from stored AccountID should not panic"),
-                (nonce as u64).into(),
-                serde_json::from_str(&vault_root).map_err(StoreError::DataDeserializationError)?,
-                serde_json::from_str(&storage_root)
-                    .map_err(StoreError::DataDeserializationError)?,
-                serde_json::from_str(&code_root).map_err(StoreError::DataDeserializationError)?,
-            ));
-        }
+        Ok(accounts
+            .select(Accounts::as_select())
+            .load(&mut self.db)
+            .unwrap() // TODO: handle unwrap
+            .iter()
+            .map(|a| a.to_account_stub().unwrap()) // TODO: handle unwrap
+            .collect())
+    }
+
+    pub fn insert_account_with_metadata(&mut self, account: &Account) -> Result<(), StoreError> {
+        // make this atomic
+        self.insert_account_code(account.code())?;
+        self.insert_account_storage(account.storage())?;
+        self.insert_account_vault(account.vault())?;
+        self.insert_account(&account)?;
 
-        Ok(result)
+        Ok(())
     }
 
-    pub fn insert_account(&self, account: &Account) -> Result<(), StoreError> {
-        let id: u64 = account.id().into();
-        let code_root = serde_json::to_string(&account.code().root())
-            .map_err(StoreError::InputSerializationError)?;
-        let storage_root = serde_json::to_string(&account.storage().root())
-            .map_err(StoreError::InputSerializationError)?;
-        let vault_root = serde_json::to_string(&account.vault().commitment())
-            .map_err(StoreError::InputSerializationError)?;
-
-        self.db.execute(
-            "INSERT INTO accounts (id, code_root, storage_root, vault_root, nonce, committed) VALUES (?, ?, ?, ?, ?, ?)",
-            params![
-                id as i64,
-                code_root,
-                storage_root,
-                vault_root,
-                account.nonce().inner() as i64,
-                account.is_on_chain(),
-            ],
-        )
-        .map(|_| ())
-        .map_err(StoreError::QueryError)
+    pub fn insert_account(&mut self, account: &Account) -> Result<(), StoreError> {
+        use schema::accounts;
+
+        let account = NewAccount::from_account(account).unwrap();
+        diesel::insert_into(accounts::table)
+            .values(account)
+            .returning(Accounts::as_returning())
+            .get_result(&mut self.db)
+            .map_err(StoreError::QueryError)?;
+
+        Ok(())
     }
 
-    pub fn insert_account_code(&self, account_code: &AccountCode) -> Result<(), StoreError> {
-        let code_root = serde_json::to_string(&account_code.root())
-            .map_err(StoreError::InputSerializationError)?;
-        let code = serde_json::to_string(account_code.procedures())
-            .map_err(StoreError::InputSerializationError)?;
-        let module = account_code.module().to_bytes(AstSerdeOptions {
-            serialize_imports: true,
-        });
-
-        self.db
-            .execute(
-                "INSERT INTO account_code (root, procedures, module) VALUES (?, ?, ?)",
-                params![code_root, code, module,],
-            )
-            .map(|_| ())
-            .map_err(StoreError::QueryError)
+    pub fn insert_account_code(&mut self, account_code: &AccountCode) -> Result<(), StoreError> {
+        let new_account_code = NewAccountCode::from_account_code(account_code).unwrap();
+        diesel::insert_into(schema::account_code::table)
+            .values(new_account_code)
+            .execute(&mut self.db)
+            .map_err(StoreError::QueryError)?;
+
+        Ok(())
     }
 
     pub fn insert_account_storage(
-        &self,
+        &mut self,
        account_storage: &AccountStorage,
     ) -> Result<(), StoreError> {
-        let storage_root = serde_json::to_string(&account_storage.root())
-            .map_err(StoreError::InputSerializationError)?;
-
-        let storage_slots: BTreeMap<u64, Word> = account_storage.slots().leaves().collect();
-        let storage_slots =
-            serde_json::to_string(&storage_slots).map_err(StoreError::InputSerializationError)?;
-
-        self.db
-            .execute(
-                "INSERT INTO account_storage (root, slots) VALUES (?, ?)",
-                params![storage_root, storage_slots],
-            )
-            .map(|_| ())
-            .map_err(StoreError::QueryError)
-    }
+        let new_account_storage = NewAccountStorage::from_account_storage(account_storage).unwrap();
+        diesel::insert_into(schema::account_storage::table)
+            .values(new_account_storage)
+            .execute(&mut self.db)
+            .map_err(StoreError::QueryError)?;
 
-    pub fn insert_account_vault(&self, account_vault: &AccountVault) -> Result<(), StoreError> {
-        let vault_root = serde_json::to_string(&account_vault.commitment())
-            .map_err(StoreError::InputSerializationError)?;
+        Ok(())
+    }
 
-        let assets: Vec<Asset> = account_vault.assets().collect();
-        let assets = serde_json::to_string(&assets).map_err(StoreError::InputSerializationError)?;
+    pub fn insert_account_vault(&mut self, account_vault: &AccountVault) -> Result<(), StoreError> {
+        let new_account_vault = NewAccountVault::from_account_vault(account_vault).unwrap();
+        diesel::insert_into(schema::account_vaults::table)
+            .values(new_account_vault)
+            .execute(&mut self.db)
+            .map_err(StoreError::QueryError)?;
 
-        self.db
-            .execute(
-                "INSERT INTO account_vaults (root, assets) VALUES (?, ?)",
-                params![vault_root, assets],
-            )
-            .map(|_| ())
-            .map_err(StoreError::QueryError)
+        Ok(())
     }
 
     // NOTES
     // --------------------------------------------------------------------------------------------
 
@@ -170,85 +138,88 @@
     /// Retrieves the input notes from the database
     pub fn get_input_notes(&self) -> Result<Vec<RecordedNote>, StoreError> {
-        const QUERY: &str = "SELECT script, inputs, vault, serial_num, sender_id, tag, num_assets, inclusion_proof FROM input_notes";
-
-        self.db
-            .prepare(QUERY)
-            .map_err(StoreError::QueryError)?
-            .query_map([], parse_input_note_columns)
-            .expect("no binding parameters used in query")
-            .map(|result| {
-                result
-                    .map_err(StoreError::ColumnParsingError)
-                    .and_then(parse_input_note)
-            })
-            .collect::<Result<Vec<RecordedNote>, _>>()
+        // const QUERY: &str = "SELECT script, inputs, vault, serial_num, sender_id, tag, num_assets, inclusion_proof FROM input_notes";
+
+        // self.db
+        //     .prepare(QUERY)
+        //     .map_err(StoreError::QueryError)?
+        //     .query_map([], parse_input_note_columns)
+        //     .expect("no binding parameters used in query")
+        //     .map(|result| {
+        //         result
+        //             .map_err(StoreError::ColumnParsingError)
+        //             .and_then(parse_input_note)
+        //     })
+        //     .collect::<Result<Vec<RecordedNote>, _>>()
+        todo!()
     }
 
     /// Retrieves the input note with the specified hash from the database
-    pub fn get_input_note_by_hash(&self, hash: Digest) -> Result<RecordedNote, StoreError> {
-        let query_hash =
-            serde_json::to_string(&hash).map_err(StoreError::InputSerializationError)?;
-        const QUERY: &str = "SELECT script, inputs, vault, serial_num, sender_id, tag, num_assets, inclusion_proof FROM input_notes WHERE hash = ?";
-
-        self.db
-            .prepare(QUERY)
-            .map_err(StoreError::QueryError)?
-            .query_map(params![query_hash.to_string()], parse_input_note_columns)
-            .map_err(StoreError::QueryError)?
-            .map(|result| {
-                result
-                    .map_err(StoreError::ColumnParsingError)
-                    .and_then(parse_input_note)
-            })
-            .next()
-            .ok_or(StoreError::InputNoteNotFound(hash))?
+    pub fn get_input_note_by_hash(&self, _hash: Digest) -> Result<RecordedNote, StoreError> {
+        // let query_hash =
+        //     serde_json::to_string(&hash).map_err(StoreError::InputSerializationError)?;
+        // const QUERY: &str = "SELECT script, inputs, vault, serial_num, sender_id, tag, num_assets, inclusion_proof FROM input_notes WHERE hash = ?";
+
+        // self.db
+        //     .prepare(QUERY)
+        //     .map_err(StoreError::QueryError)?
+        //     .query_map(params![query_hash.to_string()], parse_input_note_columns)
+        //     .map_err(StoreError::QueryError)?
+        //     .map(|result| {
+        //         result
+        //             .map_err(StoreError::ColumnParsingError)
+        //             .and_then(parse_input_note)
+        //     })
+        //     .next()
+        //     .ok_or(StoreError::InputNoteNotFound(hash))?
+        todo!()
     }
 
     /// Inserts the provided input note into the database
-    pub fn insert_input_note(&self, recorded_note: &RecordedNote) -> Result<(), StoreError> {
-        let (
-            hash,
-            nullifier,
-            script,
-            vault,
-            inputs,
-            serial_num,
-            sender_id,
-            tag,
-            num_assets,
-            inclusion_proof,
-            recipients,
-            status,
-            commit_height,
-        ) = serialize_input_note(recorded_note)?;
-
-        const QUERY: &str = "\
-        INSERT INTO input_notes
-            (hash, nullifier, script, vault, inputs, serial_num, sender_id, tag, num_assets, inclusion_proof, recipients, status, commit_height)
-         VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
-
-        self.db
-            .execute(
-                QUERY,
-                params![
-                    hash,
-                    nullifier,
-                    script,
-                    vault,
-                    inputs,
-                    serial_num,
-                    sender_id,
-                    tag,
-                    num_assets,
-                    inclusion_proof,
-                    recipients,
-                    status,
-                    commit_height
-                ],
-            )
-            .map_err(StoreError::QueryError)
-            .map(|_| ())
+    pub fn insert_input_note(&self, _recorded_note: &RecordedNote) -> Result<(), StoreError> {
+        // let (
+        //     hash,
+        //     nullifier,
+        //     script,
+        //     vault,
+        //     inputs,
+        //     serial_num,
+        //     sender_id,
+        //     tag,
+        //     num_assets,
+        //     inclusion_proof,
+        //     recipients,
+        //     status,
+        //     commit_height,
+        // ) = serialize_input_note(recorded_note)?;
+
+        // const QUERY: &str = "\
+        // INSERT INTO input_notes
+        //     (hash, nullifier, script, vault, inputs, serial_num, sender_id, tag, num_assets, inclusion_proof, recipients, status, commit_height)
+        //  VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
+
+        // self.db
+        //     .execute(
+        //         QUERY,
+        //         params![
+        //             hash,
+        //             nullifier,
+        //             script,
+        //             vault,
+        //             inputs,
+        //             serial_num,
+        //             sender_id,
+        //             tag,
+        //             num_assets,
+        //             inclusion_proof,
+        //             recipients,
+        //             status,
+        //             commit_height
+        //         ],
+        //     )
+        //     .map_err(StoreError::QueryError)
+        //     .map(|_| ())
+        todo!()
     }
 }
@@ -363,12 +334,36 @@ fn serialize_input_note(
 
 #[cfg(test)]
 pub mod tests {
+    use diesel::{Connection, SqliteConnection};
+    use miden_lib::assembler::assembler;
+    use mock::mock::account;
     use std::env::temp_dir;
     use uuid::Uuid;
 
+    use super::{Store, MIGRATIONS};
+    use diesel_migrations::MigrationHarness;
+
     pub fn create_test_store_path() -> std::path::PathBuf {
         let mut temp_file = temp_dir();
         temp_file.push(format!("{}.sqlite3", Uuid::new_v4()));
         temp_file
     }
+
+    fn create_test_store() -> Store {
+        let temp_file = create_test_store_path();
+        let mut db = SqliteConnection::establish(temp_file.to_str().unwrap()).unwrap();
+        db.run_pending_migrations(MIGRATIONS).unwrap();
+
+        Store { db }
+    }
+
+    #[test]
+    pub fn insert_same_account_twice_fails() {
+        let mut store = create_test_store();
+        let assembler = assembler();
+        let account = account::mock_new_account(&assembler);
+
+        assert!(store.insert_account_with_metadata(&account).is_ok());
+        assert!(store.insert_account_with_metadata(&account).is_err());
+    }
 }
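The new `insert_account_with_metadata` above still carries a `// make this atomic` note. One way to get atomicity with Diesel is `Connection::transaction`, which wraps the closure in BEGIN/COMMIT and rolls back on error. A rough sketch, assuming the per-table insert helpers are refactored to take the connection handle rather than `&mut self` (the helper names below are hypothetical):

```rust
use diesel::prelude::*;
use diesel::result::Error as DieselError;
use diesel::sqlite::SqliteConnection;

// Hypothetical stand-ins for the per-table inserts; the real methods would
// need to borrow the connection instead of the whole Store.
fn insert_account_code(_conn: &mut SqliteConnection) -> Result<(), DieselError> { Ok(()) }
fn insert_account_storage(_conn: &mut SqliteConnection) -> Result<(), DieselError> { Ok(()) }
fn insert_account_vault(_conn: &mut SqliteConnection) -> Result<(), DieselError> { Ok(()) }
fn insert_account_row(_conn: &mut SqliteConnection) -> Result<(), DieselError> { Ok(()) }

fn insert_account_with_metadata_atomic(db: &mut SqliteConnection) -> Result<(), DieselError> {
    // If any insert fails, the whole transaction is rolled back.
    db.transaction::<_, DieselError, _>(|conn| {
        insert_account_code(conn)?;
        insert_account_storage(conn)?;
        insert_account_vault(conn)?;
        insert_account_row(conn)?;
        Ok(())
    })
}
```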
diff --git a/src/store/models.rs b/src/store/models.rs
new file mode 100644
index 000000000..ab90e04ef
--- /dev/null
+++ b/src/store/models.rs
@@ -0,0 +1,202 @@
+use std::collections::BTreeMap;
+
+use crypto::Word;
+use diesel::prelude::*;
+use objects::{
+    accounts::{
+        Account, AccountCode as AccountCodeObject, AccountStorage as AccountStorageObject,
+        AccountStub, AccountVault as AccountVaultObject,
+    },
+    assembly::AstSerdeOptions,
+    assets::Asset,
+};
+
+use super::{schema, StoreError};
+
+// ACCOUNT CODE TABLE
+// --------------------------------------------------------------------------------------------
+
+#[derive(Queryable, Selectable)]
+#[diesel(table_name = schema::account_code)]
+pub struct AccountCode {
+    pub root: Vec<u8>,
+    pub procedures: Vec<u8>,
+    pub module: Vec<u8>,
+}
+
+#[derive(Insertable)]
+#[diesel(table_name = schema::account_code)]
+pub struct NewAccountCode {
+    pub root: Vec<u8>,
+    pub procedures: Vec<u8>,
+    pub module: Vec<u8>,
+}
+
+impl NewAccountCode {
+    pub fn from_account_code(account_code: &AccountCodeObject) -> Result<Self, StoreError> {
+        Ok(NewAccountCode {
+            root: serde_json::to_string(&account_code.root())
+                .unwrap() // TODO: remove unwraps
+                .into_bytes(),
+            procedures: serde_json::to_string(&account_code.procedures())
+                .unwrap() // TODO: remove unwraps
+                .into_bytes(),
+            module: account_code.module().to_bytes(AstSerdeOptions {
+                serialize_imports: true,
+            }),
+        })
+    }
+}
+
+// ACCOUNT STORAGE TABLE
+// --------------------------------------------------------------------------------------------
+
+#[derive(Queryable, Selectable)]
+#[diesel(table_name = schema::account_storage)]
+pub struct AccountStorage {
+    pub root: Vec<u8>,
+    pub slots: Vec<u8>,
+}
+
+#[derive(Insertable)]
+#[diesel(table_name = schema::account_storage)]
+pub struct NewAccountStorage {
+    pub root: Vec<u8>,
+    pub slots: Vec<u8>,
+}
+
+impl NewAccountStorage {
+    pub fn from_account_storage(
+        account_storage: &AccountStorageObject,
+    ) -> Result<Self, StoreError> {
+        let storage_slots: BTreeMap<u64, Word> = account_storage.slots().leaves().collect();
+        let slots = serde_json::to_string(&storage_slots).unwrap().into_bytes(); // TODO: remove unwraps
+
+        Ok(NewAccountStorage {
+            root: serde_json::to_string(&account_storage.root())
+                .unwrap() // TODO: remove unwraps
+                .into_bytes(),
+            slots,
+        })
+    }
+}
+
+// ACCOUNT VAULTS TABLE
+// --------------------------------------------------------------------------------------------
+
+#[derive(Queryable, Selectable)]
+#[diesel(table_name = schema::account_vaults)]
+pub struct AccountVaults {
+    pub root: Vec<u8>,
+    pub assets: Vec<u8>,
+}
+
+#[derive(Insertable)]
+#[diesel(table_name = schema::account_vaults)]
+pub struct NewAccountVault {
+    pub root: Vec<u8>,
+    pub assets: Vec<u8>,
+}
+
+impl NewAccountVault {
+    pub fn from_account_vault(account_vault: &AccountVaultObject) -> Result<Self, StoreError> {
+        let assets: Vec<Asset> = account_vault.assets().collect();
+        let assets = serde_json::to_string(&assets).unwrap().into_bytes();
+        Ok(NewAccountVault {
+            root: serde_json::to_string(&account_vault.commitment())
+                .unwrap()
+                .into_bytes(),
+            assets,
+        })
+    }
+}
+
+// ACCOUNT KEYS TABLE
+// --------------------------------------------------------------------------------------------
+
+#[derive(Queryable, Selectable)]
+#[diesel(table_name = schema::account_keys)]
+pub struct AccountKeys {
+    pub account_id: i64, // `BigInt` columns map to `i64` in Diesel
+    pub key_pair: Vec<u8>,
+}
+
+// ACCOUNTS TABLE
+// --------------------------------------------------------------------------------------------
+
+#[derive(Queryable, Selectable)]
+#[diesel(table_name = schema::accounts)]
+pub struct Accounts {
+    pub id: i64,
+    pub code_root: Vec<u8>,
+    pub storage_root: Vec<u8>,
+    pub vault_root: Vec<u8>,
+    pub nonce: i64,
+    pub committed: bool,
+}
+
+impl Accounts {
+    pub fn to_account_stub(&self) -> Result<AccountStub, StoreError> {
+        Ok(AccountStub::new(
+            (self.id as u64)
+                .try_into()
+                .expect("Conversion from stored AccountID should not panic"),
+            (self.nonce as u64).into(),
+            serde_json::from_str(&String::from_utf8(self.vault_root.clone()).unwrap()).unwrap(), // TODO: remove unwraps
+            serde_json::from_str(&String::from_utf8(self.storage_root.clone()).unwrap()).unwrap(), // TODO: remove unwraps
+            serde_json::from_str(&String::from_utf8(self.code_root.clone()).unwrap()).unwrap(), // TODO: remove unwraps
+        ))
+    }
+}
+
+#[derive(Insertable)]
+#[diesel(table_name = schema::accounts)]
+pub struct NewAccount {
+    pub id: i64,
+    pub code_root: Vec<u8>,
+    pub storage_root: Vec<u8>,
+    pub vault_root: Vec<u8>,
+    pub nonce: i64,
+    pub committed: bool,
+}
+
+impl NewAccount {
+    pub fn from_account(account: &Account) -> Result<Self, StoreError> {
+        let id: u64 = account.id().into();
+        Ok(NewAccount {
+            id: id as i64,
+            code_root: serde_json::to_string(&account.code().root())
+                .unwrap() // TODO: remove unwraps
+                .into_bytes(),
+            storage_root: serde_json::to_string(&account.storage().root())
+                .unwrap() // TODO: remove unwraps
+                .into_bytes(),
+            vault_root: serde_json::to_string(&account.vault().commitment())
+                .unwrap() // TODO: remove unwraps
+                .into_bytes(),
+            nonce: account.nonce().inner() as i64,
+            committed: account.is_on_chain(),
+        })
+    }
+}
+
+// INPUT NOTES TABLE
+// --------------------------------------------------------------------------------------------
+
+#[derive(Queryable, Selectable)]
+#[diesel(table_name = schema::input_notes)]
+pub struct InputNotes {
+    pub hash: Vec<u8>,
+    pub nullifier: Vec<u8>,
+    pub script: Vec<u8>,
+    pub vault: Vec<u8>,
+    pub inputs: Vec<u8>,
+    pub serial_num: Vec<u8>,
+    pub sender_id: i64, // `BigInt` columns map to `i64` in Diesel
+    pub tag: i64,
+    pub num_assets: i64,
+    pub inclusion_proof: Vec<u8>,
+    pub recipients: Vec<u8>,
+    pub status: Option<String>, // the `status` column is `Nullable<Text>`
+    pub commit_height: i64,
+}
diff --git a/src/store/schema.rs b/src/store/schema.rs
new file mode 100644
index 000000000..cfc21b248
--- /dev/null
+++ b/src/store/schema.rs
@@ -0,0 +1,73 @@
+// @generated automatically by Diesel CLI.
+
+diesel::table! {
+    account_code (root) {
+        root -> Binary,
+        procedures -> Binary,
+        module -> Binary,
+    }
+}
+
+diesel::table! {
+    account_keys (account_id) {
+        account_id -> BigInt,
+        key_pair -> Binary,
+    }
+}
+
+diesel::table! {
+    account_storage (root) {
+        root -> Binary,
+        slots -> Binary,
+    }
+}
+
+diesel::table! {
+    account_vaults (root) {
+        root -> Binary,
+        assets -> Binary,
+    }
+}
+
+diesel::table! {
+    accounts (id) {
+        id -> BigInt,
+        code_root -> Binary,
+        storage_root -> Binary,
+        vault_root -> Binary,
+        nonce -> BigInt,
+        committed -> Bool,
+    }
+}
+
+diesel::table! {
+    input_notes (hash) {
+        hash -> Binary,
+        nullifier -> Binary,
+        script -> Binary,
+        vault -> Binary,
+        inputs -> Binary,
+        serial_num -> Binary,
+        sender_id -> BigInt,
+        tag -> BigInt,
+        num_assets -> BigInt,
+        inclusion_proof -> Binary,
+        recipients -> Binary,
+        status -> Nullable<Text>,
+        commit_height -> BigInt,
+    }
+}
+
+diesel::joinable!(account_keys -> accounts (account_id));
+diesel::joinable!(accounts -> account_code (code_root));
+diesel::joinable!(accounts -> account_storage (storage_root));
+diesel::joinable!(accounts -> account_vaults (vault_root));
+
+diesel::allow_tables_to_appear_in_same_query!(
+    account_code,
+    account_keys,
+    account_storage,
+    account_vaults,
+    accounts,
+    input_notes,
+);
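The generated `schema.rs` above is what the Diesel DSL builds queries against. As a reference for how further lookups could be written against it, here is a small sketch of a helper that could sit next to `get_accounts` in `src/store/mod.rs`; it assumes the `models` and `schema` modules added in this patch, and the helper itself is illustrative only:

```rust
use diesel::prelude::*;
use diesel::sqlite::SqliteConnection;

use models::Accounts;

// Look up a single account row by its ID, returning None if it is not stored.
fn find_account(db: &mut SqliteConnection, account_id: i64) -> QueryResult<Option<Accounts>> {
    use schema::accounts::dsl::*;

    accounts
        .filter(id.eq(account_id))
        .select(Accounts::as_select())
        .first(db)
        .optional()
}
```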