diff --git a/libraries/chain/db_block.cpp b/libraries/chain/db_block.cpp
index e9124594e..542254c5b 100644
--- a/libraries/chain/db_block.cpp
+++ b/libraries/chain/db_block.cpp
@@ -286,6 +286,8 @@ bool database::_push_block(const signed_block& new_block)
  */
 processed_transaction database::push_transaction( const signed_transaction& trx, uint32_t skip )
 { try {
+   // see https://github.com/bitshares/bitshares-core/issues/1573
+   FC_ASSERT( fc::raw::pack_size( trx ) < (1024 * 1024), "Transaction exceeds maximum transaction size." );
    processed_transaction result;
    detail::with_skip_flags( *this, skip, [&]() {
@@ -703,6 +705,10 @@ processed_transaction database::_apply_transaction(const signed_transaction& trx
       FC_ASSERT( trx.expiration <= now + chain_parameters.maximum_time_until_expiration, "",
                  ("trx.expiration",trx.expiration)("now",now)("max_til_exp",chain_parameters.maximum_time_until_expiration));
       FC_ASSERT( now <= trx.expiration, "", ("now",now)("trx.exp",trx.expiration) );
+      if ( !(skip & skip_block_size_check) ) // don't waste time on replay
+         FC_ASSERT( head_block_time() <= HARDFORK_1002_TIME
+                    || trx.get_packed_size() <= chain_parameters.maximum_transaction_size,
+                    "Transaction exceeds maximum transaction size." );
    }

    //Insert transaction into unique transactions database.
diff --git a/libraries/chain/hardfork.d/1002.hf b/libraries/chain/hardfork.d/1002.hf
new file mode 100644
index 000000000..0598729fe
--- /dev/null
+++ b/libraries/chain/hardfork.d/1002.hf
@@ -0,0 +1,4 @@
+// added transaction size check
+#ifndef HARDFORK_1002_TIME
+#define HARDFORK_1002_TIME (fc::time_point_sec( 1566797400 )) // Monday, 26 August 2019 05:30:00 GMT
+#endif
diff --git a/libraries/chain/include/graphene/chain/protocol/transaction.hpp b/libraries/chain/include/graphene/chain/protocol/transaction.hpp
index 4d529a277..0cbe785a9 100644
--- a/libraries/chain/include/graphene/chain/protocol/transaction.hpp
+++ b/libraries/chain/include/graphene/chain/protocol/transaction.hpp
@@ -113,6 +113,8 @@ namespace graphene { namespace chain {
       }

       void get_required_authorities( flat_set<account_id_type>& active, flat_set<account_id_type>& owner, vector<authority>& other )const;
+
+      virtual uint64_t get_packed_size()const;
    };

 /**
diff --git a/libraries/chain/protocol/transaction.cpp b/libraries/chain/protocol/transaction.cpp
index 5faf1c0a1..f25a356fe 100644
--- a/libraries/chain/protocol/transaction.cpp
+++ b/libraries/chain/protocol/transaction.cpp
@@ -59,6 +59,11 @@ void transaction::validate() const
       operation_validate(op);
 }

+uint64_t transaction::get_packed_size() const
+{
+   return fc::raw::pack_size(*this);
+}
+
 graphene::chain::transaction_id_type graphene::chain::transaction::id() const
 {
    auto h = digest();
diff --git a/tests/tests/network_broadcast_api_tests.cpp b/tests/tests/network_broadcast_api_tests.cpp
index 481654893..963892e25 100644
--- a/tests/tests/network_broadcast_api_tests.cpp
+++ b/tests/tests/network_broadcast_api_tests.cpp
@@ -10,6 +10,8 @@
 #include
 #include
 #include
+#include
+#include

 #include "../common/database_fixture.hpp"

@@ -418,4 +420,29 @@ BOOST_AUTO_TEST_CASE( check_passes_for_duplicated_betting_market_or_group )
    }
 }

+BOOST_AUTO_TEST_CASE( broadcast_transaction_too_large ) {
+   try {
+
+      fc::ecc::private_key cid_key = fc::ecc::private_key::regenerate( fc::digest("key") );
+      const account_id_type cid_id = create_account( "cid", cid_key.get_public_key() ).id;
+      fund( cid_id(db) );
+
+      auto nb_api = std::make_shared< graphene::app::network_broadcast_api >( app );
+
+      generate_blocks( HARDFORK_1002_TIME + 10 );
+
+      set_expiration( db, trx );
+      transfer_operation trans;
+      trans.from = cid_id;
+      trans.to = account_id_type();
+      trans.amount = asset(1);
+      for( int i = 0; i < 250; ++i )
+         trx.operations.push_back( trans );
+      sign( trx, cid_key );
+
+      BOOST_CHECK_THROW( nb_api->broadcast_transaction( trx ), fc::exception );
+
+   } FC_LOG_AND_RETHROW()
+}
+
 BOOST_AUTO_TEST_SUITE_END()
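
For context, both checks in the patch key off the serialized size of the transaction, fc::raw::pack_size( trx ): push_transaction() rejects anything at or above a hard 1 MiB cap unconditionally, while _apply_transaction() enforces the configurable chain_parameters.maximum_transaction_size once HARDFORK_1002_TIME has passed. The sketch below is illustrative only and not part of the patch; the helper name fits_size_limits is hypothetical, and it assumes an application that links against the graphene chain library and fc.

#include <graphene/chain/protocol/transaction.hpp>
#include <fc/io/raw.hpp>

// Hypothetical client-side pre-check mirroring the node-side asserts:
// the hard 1 MiB cap and the chain's configurable maximum_transaction_size
// (pass the value from the current chain parameters).
bool fits_size_limits( const graphene::chain::signed_transaction& trx,
                       uint64_t maximum_transaction_size )
{
   const uint64_t packed = fc::raw::pack_size( trx ); // same serialized size the node computes
   return packed < uint64_t(1024 * 1024) && packed <= maximum_transaction_size;
}

A transaction that fails such a check is what the broadcast_transaction_too_large test constructs: 250 transfer operations packed into a single transaction after the hardfork time, which the test expects to be rejected with an fc::exception.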