From d3ab03ac52320048fb88d81612a57897e33b47a7 Mon Sep 17 00:00:00 2001 From: Edgars Date: Sun, 10 Aug 2025 19:04:39 +0300 Subject: [PATCH 01/12] Fix: Improve transaction processing and logging This commit addresses several issues related to transaction processing and logging within the system. - It prevents errors when updating transaction statuses or consensus data after snapshot restores by skipping updates for non-existent transactions. - It enhances nonce handling by fetching the actual nonce from Hardhat, ensuring synchronization with the blockchain. If not connected it falls back to existing method. - It improves error handling during transaction forwarding, raising exceptions for nonce errors. - It truncates large transaction fields and contract states in logs (calldata, contract_code, state) unless in DEBUG mode, reducing log size and improving readability. - It updates gas estimation to use zkSync Era's gas limit and updates hardhat config to be the same. - It ensures LLM and Web modules are shutdown correctly and gracefully. - It updates the models to use the latest versions of openai, google, xai, anthropic and heuristai. --- backend/consensus/base.py | 37 ++++++++++-- .../transactions_processor.py | 59 +++++++++++++++---- backend/database_handler/types.py | 6 +- .../anthropic_claude-4-opus.json | 16 +++++ .../anthropic_claude-4-sonnet.json | 16 +++++ .../google_gemini-2.5-flash-lite.json | 13 ++++ .../google_gemini-2.5-flash.json | 13 ++++ ...ristai_mistralaimixtral-8x7b-instruct.json | 13 ++++ ...-4.json => openai_gpt-4-1106-preview.json} | 2 +- .../openai_gpt-4.1-mini.json | 13 ++++ .../openai_gpt-4.1-nano.json | 13 ++++ .../default_providers/openai_gpt-4.1.json | 13 ++++ .../default_providers/openai_gpt-5-mini.json | 13 ++++ .../default_providers/openai_gpt-5-nano.json | 13 ++++ .../default_providers/openai_gpt-5.json | 13 ++++ ..._grok-2-1212.json => xai_grok-3-mini.json} | 4 +- .../default_providers/xai_grok-3.json | 13 ++++ .../default_providers/xai_grok-4-0709.json | 13 ++++ .../node/create_nodes/providers_schema.json | 9 +-- backend/node/types.py | 30 ++++++++-- backend/protocol_rpc/endpoints.py | 9 ++- backend/rollup/consensus_service.py | 27 +++------ backend/validators/llm.py | 19 +++++- backend/validators/web.py | 19 +++++- .../src/assets/schemas/providers_schema.json | 9 +-- hardhat/hardhat.config.js | 2 + 26 files changed, 349 insertions(+), 58 deletions(-) create mode 100644 backend/node/create_nodes/default_providers/anthropic_claude-4-opus.json create mode 100644 backend/node/create_nodes/default_providers/anthropic_claude-4-sonnet.json create mode 100644 backend/node/create_nodes/default_providers/google_gemini-2.5-flash-lite.json create mode 100644 backend/node/create_nodes/default_providers/google_gemini-2.5-flash.json create mode 100644 backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x7b-instruct.json rename backend/node/create_nodes/default_providers/{openai_gpt-4.json => openai_gpt-4-1106-preview.json} (99%) create mode 100644 backend/node/create_nodes/default_providers/openai_gpt-4.1-mini.json create mode 100644 backend/node/create_nodes/default_providers/openai_gpt-4.1-nano.json create mode 100644 backend/node/create_nodes/default_providers/openai_gpt-4.1.json create mode 100644 backend/node/create_nodes/default_providers/openai_gpt-5-mini.json create mode 100644 backend/node/create_nodes/default_providers/openai_gpt-5-nano.json create mode 100644 backend/node/create_nodes/default_providers/openai_gpt-5.json rename backend/node/create_nodes/default_providers/{xai_grok-2-1212.json => xai_grok-3-mini.json} (88%) create mode 100644 backend/node/create_nodes/default_providers/xai_grok-3.json create mode 100644 backend/node/create_nodes/default_providers/xai_grok-4-0709.json diff --git a/backend/consensus/base.py b/backend/consensus/base.py index 52820876c..fbfcdcb16 100644 --- a/backend/consensus/base.py +++ b/backend/consensus/base.py @@ -1706,6 +1706,22 @@ async def handle(self, context): not context.transaction.appeal_leader_timeout and not context.transaction.appeal_undetermined ): + # Truncate large fields unless in DEBUG mode + truncate = os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" + transaction_data = context.transaction.to_dict() + if truncate and transaction_data.get("data") and isinstance(transaction_data["data"], dict): + # Make a copy of the data dict to avoid modifying the original + import copy + transaction_data = copy.deepcopy(transaction_data) + # Truncate calldata if present + if "calldata" in transaction_data["data"]: + if isinstance(transaction_data["data"]["calldata"], str) and len(transaction_data["data"]["calldata"]) > 100: + transaction_data["data"]["calldata"] = f"{transaction_data['data']['calldata'][:100]}... ({len(transaction_data['data']['calldata'])} chars)" + # Truncate contract_code if present + if "contract_code" in transaction_data["data"]: + if isinstance(transaction_data["data"]["contract_code"], str) and len(transaction_data["data"]["contract_code"]) > 100: + transaction_data["data"]["contract_code"] = f"{transaction_data['data']['contract_code'][:100]}... ({len(transaction_data['data']['contract_code'])} chars)" + context.msg_handler.send_message( LogEvent( "consensus_event", @@ -1714,7 +1730,7 @@ async def handle(self, context): "Executing transaction", { "transaction_hash": context.transaction.hash, - "transaction": context.transaction.to_dict(), + "transaction": transaction_data, }, transaction_hash=context.transaction.hash, ) @@ -2377,6 +2393,8 @@ async def handle(self, context): ) # Send a message indicating consensus was reached + # Truncate large fields unless in DEBUG mode + truncate = os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" context.msg_handler.send_message( LogEvent( "consensus_event", @@ -2385,7 +2403,7 @@ async def handle(self, context): "Reached consensus", { "transaction_hash": context.transaction.hash, - "consensus_data": context.consensus_data.to_dict(), + "consensus_data": context.consensus_data.to_dict(truncate_large_fields=truncate), }, transaction_hash=context.transaction.hash, ) @@ -2425,13 +2443,22 @@ async def handle(self, context): context.contract_processor.register_contract(new_contract) # Send a message indicating successful contract deployment + # Truncate contract state for logging unless in DEBUG mode + truncate = os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" + log_data = new_contract if not truncate else { + "id": new_contract["id"], + "data": { + "state": "" if new_contract.get("data", {}).get("state") else None, + "code": f"<{len(new_contract.get('data', {}).get('code', ''))} chars>" if new_contract.get("data", {}).get("code") else None, + } + } context.msg_handler.send_message( LogEvent( "deployed_contract", EventType.SUCCESS, EventScope.GENVM, "Contract deployed", - new_contract, + log_data, transaction_hash=context.transaction.hash, ) ) @@ -2517,6 +2544,8 @@ async def handle(self, context): None: The transaction remains in an undetermined state. """ # Send a message indicating consensus failure + # Truncate large fields unless in DEBUG mode + truncate = os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" context.msg_handler.send_message( LogEvent( "consensus_event", @@ -2525,7 +2554,7 @@ async def handle(self, context): "Failed to reach consensus", { "transaction_hash": context.transaction.hash, - "consensus_data": context.consensus_data.to_dict(), + "consensus_data": context.consensus_data.to_dict(truncate_large_fields=truncate), }, transaction_hash=context.transaction.hash, ) diff --git a/backend/database_handler/transactions_processor.py b/backend/database_handler/transactions_processor.py index cad8e56cd..fe88738b9 100644 --- a/backend/database_handler/transactions_processor.py +++ b/backend/database_handler/transactions_processor.py @@ -576,8 +576,14 @@ def update_transaction_status( update_current_status_changes: bool = True, ): transaction = ( - self.session.query(Transactions).filter_by(hash=transaction_hash).one() + self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) + + # If transaction doesn't exist (e.g., after snapshot restore), skip update + if not transaction: + print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping status update") + return + transaction.status = new_status if update_current_status_changes: @@ -601,18 +607,30 @@ def set_transaction_result( self, transaction_hash: str, consensus_data: dict | None ): transaction = ( - self.session.query(Transactions).filter_by(hash=transaction_hash).one() + self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) + + # If transaction doesn't exist (e.g., after snapshot restore), skip update + if not transaction: + print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping result update") + return + transaction.consensus_data = consensus_data self.session.commit() def get_transaction_count(self, address: str) -> int: - count = ( - self.session.query(Transactions) - .filter(Transactions.from_address == address) - .count() - ) - return count + # Get the actual nonce from Hardhat instead of counting DB transactions + # This ensures we're always in sync with the blockchain's nonce tracking + if self.web3.is_connected(): + return self.web3.eth.get_transaction_count(address) + else: + # Fallback to counting transactions if not connected + count = ( + self.session.query(Transactions) + .filter(Transactions.from_address == address) + .count() + ) + return count def get_transactions_for_address( self, @@ -675,16 +693,22 @@ def set_transaction_appeal_failed(self, transaction_hash: str, appeal_failed: in if appeal_failed < 0: raise ValueError("appeal_failed must be a non-negative integer") transaction = ( - self.session.query(Transactions).filter_by(hash=transaction_hash).one() + self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) + if not transaction: + print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_failed update") + return transaction.appeal_failed = appeal_failed def set_transaction_appeal_undetermined( self, transaction_hash: str, appeal_undetermined: bool ): transaction = ( - self.session.query(Transactions).filter_by(hash=transaction_hash).one() + self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) + if not transaction: + print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_undetermined update") + return transaction.appeal_undetermined = appeal_undetermined def get_highest_timestamp(self) -> int: @@ -817,8 +841,11 @@ def set_transaction_timestamp_appeal( def set_transaction_appeal_processing_time(self, transaction_hash: str): transaction = ( - self.session.query(Transactions).filter_by(hash=transaction_hash).one() + self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) + if not transaction: + print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_processing_time update") + return transaction.appeal_processing_time += ( round(time.time()) - transaction.timestamp_appeal ) @@ -981,8 +1008,11 @@ def set_transaction_appeal_leader_timeout( self, transaction_hash: str, appeal_leader_timeout: bool ) -> bool: transaction = ( - self.session.query(Transactions).filter_by(hash=transaction_hash).one() + self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) + if not transaction: + print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_leader_timeout update") + return False transaction.appeal_leader_timeout = appeal_leader_timeout self.session.commit() return appeal_leader_timeout @@ -998,8 +1028,11 @@ def set_transaction_appeal_validators_timeout( self, transaction_hash: str, appeal_validators_timeout: bool ) -> bool: transaction = ( - self.session.query(Transactions).filter_by(hash=transaction_hash).one() + self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) + if not transaction: + print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_validators_timeout update") + return False transaction.appeal_validators_timeout = appeal_validators_timeout self.session.commit() return appeal_validators_timeout diff --git a/backend/database_handler/types.py b/backend/database_handler/types.py index 37306df4b..cd0dd366b 100644 --- a/backend/database_handler/types.py +++ b/backend/database_handler/types.py @@ -11,15 +11,15 @@ class ConsensusData: ) # first item is leader function, second item is validator function validators: list[Receipt] | None = None - def to_dict(self): + def to_dict(self, truncate_large_fields=False): return { "votes": self.votes, "leader_receipt": ( - [receipt.to_dict() for receipt in self.leader_receipt] + [receipt.to_dict(truncate_large_fields=truncate_large_fields) for receipt in self.leader_receipt] if self.leader_receipt else None ), - "validators": [receipt.to_dict() for receipt in self.validators], + "validators": [receipt.to_dict(truncate_large_fields=truncate_large_fields) for receipt in self.validators] if self.validators else [], } @classmethod diff --git a/backend/node/create_nodes/default_providers/anthropic_claude-4-opus.json b/backend/node/create_nodes/default_providers/anthropic_claude-4-opus.json new file mode 100644 index 000000000..df713fdff --- /dev/null +++ b/backend/node/create_nodes/default_providers/anthropic_claude-4-opus.json @@ -0,0 +1,16 @@ +{ + "provider": "anthropic", + "plugin": "anthropic", + "model": "claude-opus-4-20250514", + "config": { + "temperature": 0.75, + "max_tokens": 500, + "top_k": 40, + "top_p": 0.9, + "timeout": 10 + }, + "plugin_config": { + "api_key_env_var": "ANTHROPIC_API_KEY", + "api_url": "https://api.anthropic.com" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/anthropic_claude-4-sonnet.json b/backend/node/create_nodes/default_providers/anthropic_claude-4-sonnet.json new file mode 100644 index 000000000..ae0cf4876 --- /dev/null +++ b/backend/node/create_nodes/default_providers/anthropic_claude-4-sonnet.json @@ -0,0 +1,16 @@ +{ + "provider": "anthropic", + "plugin": "anthropic", + "model": "claude-sonnet-4-20250514", + "config": { + "temperature": 0.75, + "max_tokens": 500, + "top_k": 40, + "top_p": 0.9, + "timeout": 10 + }, + "plugin_config": { + "api_key_env_var": "ANTHROPIC_API_KEY", + "api_url": "https://api.anthropic.com" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/google_gemini-2.5-flash-lite.json b/backend/node/create_nodes/default_providers/google_gemini-2.5-flash-lite.json new file mode 100644 index 000000000..9fd3c031a --- /dev/null +++ b/backend/node/create_nodes/default_providers/google_gemini-2.5-flash-lite.json @@ -0,0 +1,13 @@ +{ + "provider": "google", + "plugin": "google", + "model": "gemini-2.5-flash-lite", + "config": { + "temperature": 0.75, + "max_tokens": 500 + }, + "plugin_config": { + "api_key_env_var": "GEMINI_API_KEY", + "api_url": "https://generativelanguage.googleapis.com" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/google_gemini-2.5-flash.json b/backend/node/create_nodes/default_providers/google_gemini-2.5-flash.json new file mode 100644 index 000000000..21527fb73 --- /dev/null +++ b/backend/node/create_nodes/default_providers/google_gemini-2.5-flash.json @@ -0,0 +1,13 @@ +{ + "provider": "google", + "plugin": "google", + "model": "gemini-2.5-flash", + "config": { + "temperature": 0.75, + "max_tokens": 500 + }, + "plugin_config": { + "api_key_env_var": "GEMINI_API_KEY", + "api_url": "https://generativelanguage.googleapis.com" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x7b-instruct.json b/backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x7b-instruct.json new file mode 100644 index 000000000..c4d376ec0 --- /dev/null +++ b/backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x7b-instruct.json @@ -0,0 +1,13 @@ +{ + "provider": "heuristai", + "plugin": "openai-compatible", + "model": "mistralai/mixtral-8x7b-instruct", + "config": { + "temperature": 0.75, + "max_tokens": 500 + }, + "plugin_config": { + "api_key_env_var": "HEURISTAIAPIKEY", + "api_url": "https://llm-gateway.heurist.xyz" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/openai_gpt-4.json b/backend/node/create_nodes/default_providers/openai_gpt-4-1106-preview.json similarity index 99% rename from backend/node/create_nodes/default_providers/openai_gpt-4.json rename to backend/node/create_nodes/default_providers/openai_gpt-4-1106-preview.json index f72bbc370..c4f8dcba0 100644 --- a/backend/node/create_nodes/default_providers/openai_gpt-4.json +++ b/backend/node/create_nodes/default_providers/openai_gpt-4-1106-preview.json @@ -10,4 +10,4 @@ "api_key_env_var": "OPENAIKEY", "api_url": "https://api.openai.com" } -} +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/openai_gpt-4.1-mini.json b/backend/node/create_nodes/default_providers/openai_gpt-4.1-mini.json new file mode 100644 index 000000000..69c3efa2d --- /dev/null +++ b/backend/node/create_nodes/default_providers/openai_gpt-4.1-mini.json @@ -0,0 +1,13 @@ +{ + "provider": "openai", + "plugin": "openai-compatible", + "model": "gpt-4.1-mini", + "config": { + "temperature": 0.75, + "max_tokens": 500 + }, + "plugin_config": { + "api_key_env_var": "OPENAIKEY", + "api_url": "https://api.openai.com" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/openai_gpt-4.1-nano.json b/backend/node/create_nodes/default_providers/openai_gpt-4.1-nano.json new file mode 100644 index 000000000..8f45dd25c --- /dev/null +++ b/backend/node/create_nodes/default_providers/openai_gpt-4.1-nano.json @@ -0,0 +1,13 @@ +{ + "provider": "openai", + "plugin": "openai-compatible", + "model": "gpt-4.1-nano", + "config": { + "temperature": 0.75, + "max_tokens": 500 + }, + "plugin_config": { + "api_key_env_var": "OPENAIKEY", + "api_url": "https://api.openai.com" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/openai_gpt-4.1.json b/backend/node/create_nodes/default_providers/openai_gpt-4.1.json new file mode 100644 index 000000000..ef26adecd --- /dev/null +++ b/backend/node/create_nodes/default_providers/openai_gpt-4.1.json @@ -0,0 +1,13 @@ +{ + "provider": "openai", + "plugin": "openai-compatible", + "model": "gpt-4.1", + "config": { + "temperature": 0.75, + "max_tokens": 500 + }, + "plugin_config": { + "api_key_env_var": "OPENAIKEY", + "api_url": "https://api.openai.com" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/openai_gpt-5-mini.json b/backend/node/create_nodes/default_providers/openai_gpt-5-mini.json new file mode 100644 index 000000000..74e122be7 --- /dev/null +++ b/backend/node/create_nodes/default_providers/openai_gpt-5-mini.json @@ -0,0 +1,13 @@ +{ + "provider": "openai", + "plugin": "openai-compatible", + "model": "gpt-5-mini", + "config": { + "temperature": 0.75, + "max_tokens": 500 + }, + "plugin_config": { + "api_key_env_var": "OPENAIKEY", + "api_url": "https://api.openai.com" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/openai_gpt-5-nano.json b/backend/node/create_nodes/default_providers/openai_gpt-5-nano.json new file mode 100644 index 000000000..5ec4a26d6 --- /dev/null +++ b/backend/node/create_nodes/default_providers/openai_gpt-5-nano.json @@ -0,0 +1,13 @@ +{ + "provider": "openai", + "plugin": "openai-compatible", + "model": "gpt-5-nano", + "config": { + "temperature": 0.75, + "max_tokens": 500 + }, + "plugin_config": { + "api_key_env_var": "OPENAIKEY", + "api_url": "https://api.openai.com" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/openai_gpt-5.json b/backend/node/create_nodes/default_providers/openai_gpt-5.json new file mode 100644 index 000000000..0f6e1a18d --- /dev/null +++ b/backend/node/create_nodes/default_providers/openai_gpt-5.json @@ -0,0 +1,13 @@ +{ + "provider": "openai", + "plugin": "openai-compatible", + "model": "gpt-5", + "config": { + "temperature": 0.75, + "max_tokens": 500 + }, + "plugin_config": { + "api_key_env_var": "OPENAIKEY", + "api_url": "https://api.openai.com" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/xai_grok-2-1212.json b/backend/node/create_nodes/default_providers/xai_grok-3-mini.json similarity index 88% rename from backend/node/create_nodes/default_providers/xai_grok-2-1212.json rename to backend/node/create_nodes/default_providers/xai_grok-3-mini.json index 03db1cd13..b758aff15 100644 --- a/backend/node/create_nodes/default_providers/xai_grok-2-1212.json +++ b/backend/node/create_nodes/default_providers/xai_grok-3-mini.json @@ -1,7 +1,7 @@ { "provider": "xai", "plugin": "openai-compatible", - "model": "grok-2-1212", + "model": "grok-3-mini", "config": { "temperature": 0.75, "max_tokens": 500 @@ -10,4 +10,4 @@ "api_key_env_var": "XAI_API_KEY", "api_url": "https://api.x.ai" } -} +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/xai_grok-3.json b/backend/node/create_nodes/default_providers/xai_grok-3.json new file mode 100644 index 000000000..3683fa1d2 --- /dev/null +++ b/backend/node/create_nodes/default_providers/xai_grok-3.json @@ -0,0 +1,13 @@ +{ + "provider": "xai", + "plugin": "openai-compatible", + "model": "grok-3", + "config": { + "temperature": 0.75, + "max_tokens": 500 + }, + "plugin_config": { + "api_key_env_var": "XAI_API_KEY", + "api_url": "https://api.x.ai" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/xai_grok-4-0709.json b/backend/node/create_nodes/default_providers/xai_grok-4-0709.json new file mode 100644 index 000000000..78f83d06d --- /dev/null +++ b/backend/node/create_nodes/default_providers/xai_grok-4-0709.json @@ -0,0 +1,13 @@ +{ + "provider": "xai", + "plugin": "openai-compatible", + "model": "grok-4-0709", + "config": { + "temperature": 0.75, + "max_tokens": 500 + }, + "plugin_config": { + "api_key_env_var": "XAI_API_KEY", + "api_url": "https://api.x.ai" + } +} \ No newline at end of file diff --git a/backend/node/create_nodes/providers_schema.json b/backend/node/create_nodes/providers_schema.json index e110ed4df..aa36ee470 100644 --- a/backend/node/create_nodes/providers_schema.json +++ b/backend/node/create_nodes/providers_schema.json @@ -55,7 +55,7 @@ "const": "openai-compatible" }, "model": { - "enum": ["grok-2-1212"] + "enum": ["grok-4-0709", "grok-3", "grok-3-mini"] } } } @@ -72,7 +72,7 @@ "const": "google" }, "model": { - "enum": ["gemini-2.0-flash-lite-001"] + "enum": ["gemini-2.0-flash-lite-001", "gemini-2.5-flash-lite", "gemini-2.5-flash"] } } } @@ -92,6 +92,7 @@ "enum": [ "deepseek/deepseek-v3", "mistralai/mixtral-8x22b-instruct", + "mistralai/mixtral-8x7b-instruct", "meta-llama/llama-3.3-70b-instruct" ] } @@ -110,7 +111,7 @@ "const": "openai-compatible" }, "model": { - "enum": ["gpt-4-1106-preview", "gpt-4o"] + "enum": ["gpt-4-1106-preview", "gpt-4o", "gpt-4.1-mini", "gpt-4.1-nano", "gpt-4.1"] } } } @@ -127,7 +128,7 @@ "const": "anthropic" }, "model": { - "enum": ["claude-3-7-sonnet-20250219", "claude-3-5-haiku-20241022"] + "enum": ["claude-3-7-sonnet-20250219", "claude-3-5-haiku-20241022", "claude-sonnet-4-20250514", "claude-opus-4-20250514"] } } } diff --git a/backend/node/types.py b/backend/node/types.py index 1ba7ffd5b..4dccb1e06 100644 --- a/backend/node/types.py +++ b/backend/node/types.py @@ -193,15 +193,37 @@ class Receipt: pending_transactions: Iterable[PendingTransaction] = () genvm_result: dict[str, str] | None = None - def to_dict(self): + def to_dict(self, truncate_large_fields=False): + """Convert Receipt to dict. + + Args: + truncate_large_fields: If True, truncate large fields for logging + """ + def truncate_if_needed(value, field_name): + if not truncate_large_fields: + return value + + # Fields to truncate when in summary mode + if field_name in ['calldata', 'result']: + if isinstance(value, str) and len(value) > 100: + return f"{value[:100]}... ({len(value)} chars)" + elif field_name == 'contract_state': + if value and len(str(value)) > 100: + return f"<{len(value)} entries, truncated>" + + return value + + result = base64.b64encode(self.result).decode("ascii") + calldata = str(base64.b64encode(self.calldata), encoding="ascii") + return { "vote": self.vote.value if self.vote else None, "execution_result": self.execution_result.value, - "result": base64.b64encode(self.result).decode("ascii"), - "calldata": str(base64.b64encode(self.calldata), encoding="ascii"), + "result": truncate_if_needed(result, 'result'), + "calldata": truncate_if_needed(calldata, 'calldata'), "gas_used": self.gas_used, "mode": self.mode.value, - "contract_state": self.contract_state, + "contract_state": truncate_if_needed(self.contract_state, 'contract_state'), "node_config": self.node_config, "eq_outputs": self.eq_outputs, "pending_transactions": [ diff --git a/backend/protocol_rpc/endpoints.py b/backend/protocol_rpc/endpoints.py index 33c71a8a3..5d58ffbab 100644 --- a/backend/protocol_rpc/endpoints.py +++ b/backend/protocol_rpc/endpoints.py @@ -702,7 +702,9 @@ def send_raw_transaction( decoded_rollup_transaction = transactions_parser.decode_signed_transaction( signed_rollup_transaction ) - print("DECODED ROLLUP TRANSACTION", decoded_rollup_transaction) + # Debug logging - only in DEBUG mode + if os.environ.get("LOG_LEVEL", "INFO").upper() == "DEBUG": + print("DECODED ROLLUP TRANSACTION", decoded_rollup_transaction) # Validate transaction if decoded_rollup_transaction is None: @@ -879,8 +881,9 @@ def get_gas_price() -> str: def get_gas_estimate(data: Any) -> str: - gas_price_in_wei = 30 * 10**6 - return hex(gas_price_in_wei) + # Use zkSync Era's gas limit: 2^32 - 1 (4,294,967,295) + gas_limit = 0xFFFFFFFF # 4,294,967,295 + return hex(gas_limit) def get_transaction_receipt( diff --git a/backend/rollup/consensus_service.py b/backend/rollup/consensus_service.py index 89033e4ce..27e685646 100644 --- a/backend/rollup/consensus_service.py +++ b/backend/rollup/consensus_service.py @@ -175,33 +175,24 @@ def add_transaction( if "nonce too high" in error_str.lower() else "nonce_too_low" if "nonce too low" in error_str.lower() else None ) + # For nonce errors with pre-signed transactions, we can't fix them + # The transaction would need to be re-signed with the correct nonce if error_type: - # Extract expected and current nonce from error message match = re.search( r"Expected nonce to be (\d+) but got (\d+)", error_str ) if match: + expected_nonce = int(match.group(1)) current_nonce = int(match.group(2)) - - # Set the nonce to the expected value - print( - f"[CONSENSUS_SERVICE]: Setting nonce for {from_address} to {current_nonce}" - ) - self.web3.provider.make_request( - "hardhat_setNonce", [from_address, hex(current_nonce)] - ) - - if retry: - return self.add_transaction( - transaction, from_address, retry=False - ) - else: print( - f"[CONSENSUS_SERVICE]: Could not parse nonce from error message: {error_str}" + f"[CONSENSUS_SERVICE]: Nonce mismatch - expected {expected_nonce}, got {current_nonce}. " + f"Transaction needs to be re-signed with correct nonce." ) print(f"[CONSENSUS_SERVICE]: Error forwarding transaction: {error_str}") - return None + # Raise the exception to be handled by the caller instead of returning None + # This will ensure the transaction fails explicitly rather than getting stuck in PENDING + raise Exception(f"Transaction failed: {error_str}") def emit_transaction_event(self, event_name: str, account: dict, *args): """ @@ -237,7 +228,7 @@ def emit_transaction_event(self, event_name: str, account: dict, *args): tx = event_function(*args).build_transaction( { "from": account_address, - "gas": 50000000, + "gas": 0xFFFFFFFF, # 2^32 - 1 (4,294,967,295) - zkSync Era limit "gasPrice": 0, "nonce": self.web3.eth.get_transaction_count(account_address), } diff --git a/backend/validators/llm.py b/backend/validators/llm.py index c9e9f86ee..11b7a4f69 100644 --- a/backend/validators/llm.py +++ b/backend/validators/llm.py @@ -62,11 +62,28 @@ def __del__(self): async def stop(self): if self._process is not None: + print(f"[LLMModule] Stopping process (PID: {self._process.pid})") try: self._process.send_signal(signal.SIGINT) except ProcessLookupError: pass - await self._process.wait() + + try: + # Wait for process to terminate with a timeout + await asyncio.wait_for(self._process.wait(), timeout=5.0) + print(f"[LLMModule] Process terminated gracefully") + except asyncio.TimeoutError: + print(f"[LLMModule] Process didn't terminate with SIGINT, trying SIGKILL") + # If SIGINT didn't work, try SIGKILL + try: + self._process.send_signal(signal.SIGKILL) + await asyncio.wait_for(self._process.wait(), timeout=2.0) + print(f"[LLMModule] Process terminated with SIGKILL") + except (asyncio.TimeoutError, ProcessLookupError): + print(f"[LLMModule] Process termination failed, continuing anyway") + # If still hanging, just give up and set to None + pass + self._process = None async def restart(self): diff --git a/backend/validators/web.py b/backend/validators/web.py index 896b3163f..0a7da0af3 100644 --- a/backend/validators/web.py +++ b/backend/validators/web.py @@ -60,11 +60,28 @@ async def restart(self): async def stop(self): if self._process is not None: + print(f"[WebModule] Stopping process (PID: {self._process.pid})") try: self._process.send_signal(signal.SIGINT) except ProcessLookupError: pass - await self._process.wait() + + try: + # Wait for process to terminate with a timeout + await asyncio.wait_for(self._process.wait(), timeout=5.0) + print(f"[WebModule] Process terminated gracefully") + except asyncio.TimeoutError: + print(f"[WebModule] Process didn't terminate with SIGINT, trying SIGKILL") + # If SIGINT didn't work, try SIGKILL + try: + self._process.send_signal(signal.SIGKILL) + await asyncio.wait_for(self._process.wait(), timeout=2.0) + print(f"[WebModule] Process terminated with SIGKILL") + except (asyncio.TimeoutError, ProcessLookupError): + print(f"[WebModule] Process termination failed, continuing anyway") + # If still hanging, just give up and set to None + pass + self._process = None async def verify_for_read(self): diff --git a/frontend/src/assets/schemas/providers_schema.json b/frontend/src/assets/schemas/providers_schema.json index 9220a53e6..947f5f40d 100644 --- a/frontend/src/assets/schemas/providers_schema.json +++ b/frontend/src/assets/schemas/providers_schema.json @@ -55,7 +55,7 @@ "const": "openai-compatible" }, "model": { - "enum": ["grok-2-1212"] + "enum": ["grok-4-0709", "grok-3", "grok-3-mini"] } } } @@ -72,7 +72,7 @@ "const": "google" }, "model": { - "enum": ["gemini-2.0-flash-lite-001"] + "enum": ["gemini-2.0-flash-lite-001", "gemini-2.5-flash-lite", "gemini-2.5-flash"] } } } @@ -92,6 +92,7 @@ "enum": [ "deepseek/deepseek-v3", "mistralai/mixtral-8x22b-instruct", + "mistralai/mixtral-8x7b-instruct", "meta-llama/llama-3.3-70b-instruct" ] } @@ -110,7 +111,7 @@ "const": "openai-compatible" }, "model": { - "enum": ["gpt-4-1106-preview", "gpt-4o"] + "enum": ["gpt-4-1106-preview", "gpt-4o", "gpt-4.1-mini", "gpt-4.1-nano", "gpt-4.1"] } } } @@ -127,7 +128,7 @@ "const": "anthropic" }, "model": { - "enum": ["claude-3-7-sonnet-20250219", "claude-3-5-haiku-20241022"] + "enum": ["claude-3-7-sonnet-20250219", "claude-3-5-haiku-20241022", "claude-sonnet-4-20250514", "claude-opus-4-20250514"] } } } diff --git a/hardhat/hardhat.config.js b/hardhat/hardhat.config.js index c3ec45bff..e004f8be1 100644 --- a/hardhat/hardhat.config.js +++ b/hardhat/hardhat.config.js @@ -24,6 +24,7 @@ module.exports = { }, networks: { hardhat: { + gas: 0xffffffff, // 2^32 - 1 (4,294,967,295) - same as zkSync Era mining: { auto: true, interval: 0, @@ -38,6 +39,7 @@ module.exports = { }, genlayer_network: { url: "http://localhost:8545", + gas: 0xffffffff, // 2^32 - 1 (4,294,967,295) - same as zkSync Era mining: { auto: true, interval: 0, From 7577d2ae4c87753ed38a98c2a7b6f59ad5bc1673 Mon Sep 17 00:00:00 2001 From: Edgars Date: Sun, 10 Aug 2025 19:29:05 +0300 Subject: [PATCH 02/12] Fix: Improves transaction handling and stability This commit addresses several issues related to transaction processing and overall system stability: - Enhances transaction count retrieval by normalizing addresses, fetching from RPC with pending state, and gracefully falling back to the database. Includes error handling for RPC connection issues. - Prevents errors when updating appeal processing time by checking for null `timestamp_appeal` values. - Implements more robust process termination for LLM and Web modules, ensuring cleanup and preventing resource leaks. - Adds custom exception classes for nonce errors in the Consensus Service, improving error handling and reporting. - Expands allowed LLM models in provider schemas. These changes improve the reliability and accuracy of transaction-related operations and enhance the system's resilience to external factors. --- .../transactions_processor.py | 53 +++++++++++----- .../node/create_nodes/providers_schema.json | 2 +- backend/rollup/consensus_service.py | 62 +++++++++++++++---- backend/validators/llm.py | 43 ++++++++----- backend/validators/web.py | 43 ++++++++----- .../src/assets/schemas/providers_schema.json | 2 +- 6 files changed, 142 insertions(+), 63 deletions(-) diff --git a/backend/database_handler/transactions_processor.py b/backend/database_handler/transactions_processor.py index fe88738b9..8ec42e4bd 100644 --- a/backend/database_handler/transactions_processor.py +++ b/backend/database_handler/transactions_processor.py @@ -619,18 +619,36 @@ def set_transaction_result( self.session.commit() def get_transaction_count(self, address: str) -> int: + # Normalize address to checksum format + try: + checksum_address = self.web3.to_checksum_address(address) + except: + checksum_address = address + # Get the actual nonce from Hardhat instead of counting DB transactions # This ensures we're always in sync with the blockchain's nonce tracking - if self.web3.is_connected(): - return self.web3.eth.get_transaction_count(address) - else: - # Fallback to counting transactions if not connected - count = ( - self.session.query(Transactions) - .filter(Transactions.from_address == address) - .count() - ) - return count + try: + # Check connection - handle both is_connected and isConnected + is_connected = False + if hasattr(self.web3, 'is_connected'): + is_connected = self.web3.is_connected() + elif hasattr(self.web3, 'isConnected'): + is_connected = self.web3.isConnected() + + if is_connected: + # Pass 'pending' to include pending transactions for accuracy + return self.web3.eth.get_transaction_count(checksum_address, 'pending') + except Exception as e: + # Log the error and fall back to database count + print(f"[TRANSACTIONS_PROCESSOR]: Error getting transaction count from RPC: {e}") + + # Fallback to counting transactions from database + count = ( + self.session.query(Transactions) + .filter(Transactions.from_address == checksum_address) + .count() + ) + return count def get_transactions_for_address( self, @@ -846,11 +864,16 @@ def set_transaction_appeal_processing_time(self, transaction_hash: str): if not transaction: print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_processing_time update") return - transaction.appeal_processing_time += ( - round(time.time()) - transaction.timestamp_appeal - ) - flag_modified(transaction, "appeal_processing_time") - self.session.commit() + + # Check if timestamp_appeal is not None before performing arithmetic + if transaction.timestamp_appeal is not None: + transaction.appeal_processing_time += ( + round(time.time()) - transaction.timestamp_appeal + ) + flag_modified(transaction, "appeal_processing_time") + self.session.commit() + else: + print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} has no timestamp_appeal, skipping appeal_processing_time update") def reset_transaction_appeal_processing_time(self, transaction_hash: str): transaction = ( diff --git a/backend/node/create_nodes/providers_schema.json b/backend/node/create_nodes/providers_schema.json index aa36ee470..d7e2f3732 100644 --- a/backend/node/create_nodes/providers_schema.json +++ b/backend/node/create_nodes/providers_schema.json @@ -111,7 +111,7 @@ "const": "openai-compatible" }, "model": { - "enum": ["gpt-4-1106-preview", "gpt-4o", "gpt-4.1-mini", "gpt-4.1-nano", "gpt-4.1"] + "enum": ["gpt-4-1106-preview", "gpt-4o", "gpt-4.1-mini", "gpt-4.1-nano", "gpt-4.1", "gpt-5", "gpt-5-mini", "gpt-5-nano"] } } } diff --git a/backend/rollup/consensus_service.py b/backend/rollup/consensus_service.py index 27e685646..7d3ff3ae8 100644 --- a/backend/rollup/consensus_service.py +++ b/backend/rollup/consensus_service.py @@ -11,6 +11,30 @@ ) +# Custom exception classes for nonce errors +class NonceError(Exception): + """Base exception for nonce-related errors""" + pass + + +class NonceTooLowError(NonceError): + """Exception raised when transaction nonce is too low""" + def __init__(self, expected_nonce: int, actual_nonce: int, *args, **kwargs): + self.expected_nonce = expected_nonce + self.actual_nonce = actual_nonce + message = f"Nonce too low: expected {expected_nonce}, got {actual_nonce}" + super().__init__(message, *args, **kwargs) + + +class NonceTooHighError(NonceError): + """Exception raised when transaction nonce is too high""" + def __init__(self, expected_nonce: int, actual_nonce: int, *args, **kwargs): + self.expected_nonce = expected_nonce + self.actual_nonce = actual_nonce + message = f"Nonce too high: expected {expected_nonce}, got {actual_nonce}" + super().__init__(message, *args, **kwargs) + + class ConsensusService: def __init__(self): """ @@ -170,14 +194,25 @@ def add_transaction( except Exception as e: error_str = str(e) - error_type = ( - "nonce_too_high" - if "nonce too high" in error_str.lower() - else "nonce_too_low" if "nonce too low" in error_str.lower() else None - ) - # For nonce errors with pre-signed transactions, we can't fix them - # The transaction would need to be re-signed with the correct nonce - if error_type: + + # Check for nonce errors and raise specific exceptions + if "nonce too high" in error_str.lower(): + match = re.search( + r"Expected nonce to be (\d+) but got (\d+)", error_str + ) + if match: + expected_nonce = int(match.group(1)) + current_nonce = int(match.group(2)) + print( + f"[CONSENSUS_SERVICE]: Nonce too high - expected {expected_nonce}, got {current_nonce}. " + f"Transaction needs to be re-signed with correct nonce." + ) + raise NonceTooHighError(expected_nonce, current_nonce) from e + else: + # If we can't parse the nonces, still raise typed exception + raise NonceTooHighError(0, 0) from e + + elif "nonce too low" in error_str.lower(): match = re.search( r"Expected nonce to be (\d+) but got (\d+)", error_str ) @@ -185,14 +220,17 @@ def add_transaction( expected_nonce = int(match.group(1)) current_nonce = int(match.group(2)) print( - f"[CONSENSUS_SERVICE]: Nonce mismatch - expected {expected_nonce}, got {current_nonce}. " + f"[CONSENSUS_SERVICE]: Nonce too low - expected {expected_nonce}, got {current_nonce}. " f"Transaction needs to be re-signed with correct nonce." ) + raise NonceTooLowError(expected_nonce, current_nonce) from e + else: + # If we can't parse the nonces, still raise typed exception + raise NonceTooLowError(0, 0) from e print(f"[CONSENSUS_SERVICE]: Error forwarding transaction: {error_str}") - # Raise the exception to be handled by the caller instead of returning None - # This will ensure the transaction fails explicitly rather than getting stuck in PENDING - raise Exception(f"Transaction failed: {error_str}") + # Re-raise with chaining to preserve the original traceback + raise Exception(f"Transaction failed: {error_str}") from e def emit_transaction_event(self, event_name: str, account: dict, *args): """ diff --git a/backend/validators/llm.py b/backend/validators/llm.py index 11b7a4f69..a68ab33e2 100644 --- a/backend/validators/llm.py +++ b/backend/validators/llm.py @@ -11,6 +11,7 @@ import aiohttp from pathlib import Path import json +import contextlib from dotenv import load_dotenv @@ -61,29 +62,37 @@ def __del__(self): raise Exception("service was not terminated") async def stop(self): - if self._process is not None: - print(f"[LLMModule] Stopping process (PID: {self._process.pid})") - try: + if self._process is None: + return + + # Fast-path: check if process has already exited + if self._process.returncode is not None: + self._process = None + return + + print(f"[LLMModule] Stopping process (PID: {self._process.pid})") + + try: + # Try graceful shutdown with SIGINT + with contextlib.suppress(ProcessLookupError): self._process.send_signal(signal.SIGINT) - except ProcessLookupError: - pass try: # Wait for process to terminate with a timeout await asyncio.wait_for(self._process.wait(), timeout=5.0) - print(f"[LLMModule] Process terminated gracefully") + print("[LLMModule] Process terminated gracefully") except asyncio.TimeoutError: - print(f"[LLMModule] Process didn't terminate with SIGINT, trying SIGKILL") - # If SIGINT didn't work, try SIGKILL - try: - self._process.send_signal(signal.SIGKILL) - await asyncio.wait_for(self._process.wait(), timeout=2.0) - print(f"[LLMModule] Process terminated with SIGKILL") - except (asyncio.TimeoutError, ProcessLookupError): - print(f"[LLMModule] Process termination failed, continuing anyway") - # If still hanging, just give up and set to None - pass - + print("[LLMModule] Process didn't terminate with SIGINT, trying forceful termination") + # If SIGINT didn't work, use kill() for cross-platform compatibility + with contextlib.suppress(ProcessLookupError): + self._process.kill() + try: + await asyncio.wait_for(self._process.wait(), timeout=2.0) + print("[LLMModule] Process terminated forcefully") + except asyncio.TimeoutError: + print("[LLMModule] Process termination failed, continuing anyway") + finally: + # Ensure process handle is cleared even if exception occurs self._process = None async def restart(self): diff --git a/backend/validators/web.py b/backend/validators/web.py index 0a7da0af3..6bc61bb46 100644 --- a/backend/validators/web.py +++ b/backend/validators/web.py @@ -2,6 +2,7 @@ import signal import os import sys +import contextlib from pathlib import Path @@ -59,29 +60,37 @@ async def restart(self): ) async def stop(self): - if self._process is not None: - print(f"[WebModule] Stopping process (PID: {self._process.pid})") - try: + if self._process is None: + return + + # Fast-path: check if process has already exited + if self._process.returncode is not None: + self._process = None + return + + print(f"[WebModule] Stopping process (PID: {self._process.pid})") + + try: + # Try graceful shutdown with SIGINT + with contextlib.suppress(ProcessLookupError): self._process.send_signal(signal.SIGINT) - except ProcessLookupError: - pass try: # Wait for process to terminate with a timeout await asyncio.wait_for(self._process.wait(), timeout=5.0) - print(f"[WebModule] Process terminated gracefully") + print("[WebModule] Process terminated gracefully") except asyncio.TimeoutError: - print(f"[WebModule] Process didn't terminate with SIGINT, trying SIGKILL") - # If SIGINT didn't work, try SIGKILL - try: - self._process.send_signal(signal.SIGKILL) - await asyncio.wait_for(self._process.wait(), timeout=2.0) - print(f"[WebModule] Process terminated with SIGKILL") - except (asyncio.TimeoutError, ProcessLookupError): - print(f"[WebModule] Process termination failed, continuing anyway") - # If still hanging, just give up and set to None - pass - + print("[WebModule] Process didn't terminate with SIGINT, trying forceful termination") + # If SIGINT didn't work, use kill() for cross-platform compatibility + with contextlib.suppress(ProcessLookupError): + self._process.kill() + try: + await asyncio.wait_for(self._process.wait(), timeout=2.0) + print("[WebModule] Process terminated forcefully") + except asyncio.TimeoutError: + print("[WebModule] Process termination failed, continuing anyway") + finally: + # Ensure process handle is cleared even if exception occurs self._process = None async def verify_for_read(self): diff --git a/frontend/src/assets/schemas/providers_schema.json b/frontend/src/assets/schemas/providers_schema.json index 947f5f40d..f28be6625 100644 --- a/frontend/src/assets/schemas/providers_schema.json +++ b/frontend/src/assets/schemas/providers_schema.json @@ -111,7 +111,7 @@ "const": "openai-compatible" }, "model": { - "enum": ["gpt-4-1106-preview", "gpt-4o", "gpt-4.1-mini", "gpt-4.1-nano", "gpt-4.1"] + "enum": ["gpt-4-1106-preview", "gpt-4o", "gpt-4.1-mini", "gpt-4.1-nano", "gpt-4.1", "gpt-5", "gpt-5-mini", "gpt-5-nano"] } } } From 1ebcdebfe7f50db26774534d2c6d016d2f834d89 Mon Sep 17 00:00:00 2001 From: kstroobants Date: Mon, 11 Aug 2025 12:32:17 +0800 Subject: [PATCH 03/12] fix: mocking hardhat count --- .../transactions_processor.py | 58 ++++++++++++------- .../transactions_processor_test.py | 55 +++++++++++++++--- 2 files changed, 85 insertions(+), 28 deletions(-) diff --git a/backend/database_handler/transactions_processor.py b/backend/database_handler/transactions_processor.py index 8ec42e4bd..ff9ddc1d2 100644 --- a/backend/database_handler/transactions_processor.py +++ b/backend/database_handler/transactions_processor.py @@ -578,12 +578,14 @@ def update_transaction_status( transaction = ( self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) - + # If transaction doesn't exist (e.g., after snapshot restore), skip update if not transaction: - print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping status update") + print( + f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping status update" + ) return - + transaction.status = new_status if update_current_status_changes: @@ -609,12 +611,14 @@ def set_transaction_result( transaction = ( self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) - + # If transaction doesn't exist (e.g., after snapshot restore), skip update if not transaction: - print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping result update") + print( + f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping result update" + ) return - + transaction.consensus_data = consensus_data self.session.commit() @@ -624,24 +628,24 @@ def get_transaction_count(self, address: str) -> int: checksum_address = self.web3.to_checksum_address(address) except: checksum_address = address - + # Get the actual nonce from Hardhat instead of counting DB transactions # This ensures we're always in sync with the blockchain's nonce tracking try: # Check connection - handle both is_connected and isConnected is_connected = False - if hasattr(self.web3, 'is_connected'): + if hasattr(self.web3, "is_connected"): is_connected = self.web3.is_connected() - elif hasattr(self.web3, 'isConnected'): - is_connected = self.web3.isConnected() - + if is_connected: # Pass 'pending' to include pending transactions for accuracy - return self.web3.eth.get_transaction_count(checksum_address, 'pending') + return self.web3.eth.get_transaction_count(checksum_address, "pending") except Exception as e: # Log the error and fall back to database count - print(f"[TRANSACTIONS_PROCESSOR]: Error getting transaction count from RPC: {e}") - + print( + f"[TRANSACTIONS_PROCESSOR]: Error getting transaction count from RPC: {e}" + ) + # Fallback to counting transactions from database count = ( self.session.query(Transactions) @@ -714,7 +718,9 @@ def set_transaction_appeal_failed(self, transaction_hash: str, appeal_failed: in self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) if not transaction: - print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_failed update") + print( + f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_failed update" + ) return transaction.appeal_failed = appeal_failed @@ -725,7 +731,9 @@ def set_transaction_appeal_undetermined( self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) if not transaction: - print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_undetermined update") + print( + f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_undetermined update" + ) return transaction.appeal_undetermined = appeal_undetermined @@ -862,9 +870,11 @@ def set_transaction_appeal_processing_time(self, transaction_hash: str): self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) if not transaction: - print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_processing_time update") + print( + f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_processing_time update" + ) return - + # Check if timestamp_appeal is not None before performing arithmetic if transaction.timestamp_appeal is not None: transaction.appeal_processing_time += ( @@ -873,7 +883,9 @@ def set_transaction_appeal_processing_time(self, transaction_hash: str): flag_modified(transaction, "appeal_processing_time") self.session.commit() else: - print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} has no timestamp_appeal, skipping appeal_processing_time update") + print( + f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} has no timestamp_appeal, skipping appeal_processing_time update" + ) def reset_transaction_appeal_processing_time(self, transaction_hash: str): transaction = ( @@ -1034,7 +1046,9 @@ def set_transaction_appeal_leader_timeout( self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) if not transaction: - print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_leader_timeout update") + print( + f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_leader_timeout update" + ) return False transaction.appeal_leader_timeout = appeal_leader_timeout self.session.commit() @@ -1054,7 +1068,9 @@ def set_transaction_appeal_validators_timeout( self.session.query(Transactions).filter_by(hash=transaction_hash).first() ) if not transaction: - print(f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_validators_timeout update") + print( + f"[TRANSACTIONS_PROCESSOR]: Transaction {transaction_hash} not found, skipping appeal_validators_timeout update" + ) return False transaction.appeal_validators_timeout = appeal_validators_timeout self.session.commit() diff --git a/tests/db-sqlalchemy/transactions_processor_test.py b/tests/db-sqlalchemy/transactions_processor_test.py index a6906fe0b..da627f906 100644 --- a/tests/db-sqlalchemy/transactions_processor_test.py +++ b/tests/db-sqlalchemy/transactions_processor_test.py @@ -13,8 +13,46 @@ from backend.database_handler.transactions_processor import TransactionsProcessor -@pytest.fixture(autouse=True) -def mock_env_and_web3(): +def _create_mock_web3_instance(is_connected: bool): + """Helper function to create a mock Web3 instance with specified connection status.""" + web3_instance = Web3(MagicMock(spec=BaseProvider)) + web3_instance.eth = MagicMock() + web3_instance.eth.accounts = ["0x0000000000000000000000000000000000000000"] + + call_count = {"count": 0} + + def mock_get_transaction_count(address, block_identifier="latest"): + result = call_count["count"] + call_count["count"] += 1 + return result + + web3_instance.eth.get_transaction_count = mock_get_transaction_count + web3_instance.is_connected = MagicMock(return_value=is_connected) + + return web3_instance + + +@pytest.fixture +def mock_env_and_web3_connected(): + with patch.dict( + os.environ, + { + "HARDHAT_PORT": "8545", + "HARDHAT_URL": "http://localhost", + "HARDHAT_PRIVATE_KEY": "0x0123456789", + }, + ), patch("web3.Web3.HTTPProvider"): + web3_instance = _create_mock_web3_instance(is_connected=True) + + with patch( + "backend.database_handler.transactions_processor.Web3", + return_value=web3_instance, + ): + yield web3_instance + + +@pytest.fixture +def mock_env_and_web3_disconnected(): with patch.dict( os.environ, { @@ -23,17 +61,20 @@ def mock_env_and_web3(): "HARDHAT_PRIVATE_KEY": "0x0123456789", }, ), patch("web3.Web3.HTTPProvider"): - web3_instance = Web3(MagicMock(spec=BaseProvider)) - web3_instance.eth = MagicMock() - web3_instance.eth.accounts = ["0x0000000000000000000000000000000000000000"] + web3_instance = _create_mock_web3_instance(is_connected=False) + with patch( "backend.database_handler.transactions_processor.Web3", return_value=web3_instance, ): - yield + yield web3_instance -def test_transactions_processor(transactions_processor: TransactionsProcessor): +def test_transactions_processor( + transactions_processor: TransactionsProcessor, mock_env_and_web3_connected +): + # Override the web3 instance in the transactions_processor with our mock + transactions_processor.web3 = mock_env_and_web3_connected from_address = "0x9F0e84243496AcFB3Cd99D02eA59673c05901501" to_address = "0xAcec3A6d871C25F591aBd4fC24054e524BBbF794" From 4fd4795a7c2b2f33570a3e186823027a156368ef Mon Sep 17 00:00:00 2001 From: kstroobants Date: Mon, 11 Aug 2025 12:34:02 +0800 Subject: [PATCH 04/12] fix: ran precommit --- backend/consensus/base.py | 63 ++++++++++++++----- backend/database_handler/types.py | 14 ++++- .../anthropic_claude-4-opus.json | 2 +- .../anthropic_claude-4-sonnet.json | 2 +- .../google_gemini-2.5-flash-lite.json | 2 +- .../google_gemini-2.5-flash.json | 2 +- ...ristai_mistralaimixtral-8x7b-instruct.json | 2 +- .../openai_gpt-4-1106-preview.json | 2 +- .../openai_gpt-4.1-mini.json | 2 +- .../openai_gpt-4.1-nano.json | 2 +- .../default_providers/openai_gpt-4.1.json | 2 +- .../default_providers/xai_grok-3-mini.json | 2 +- .../default_providers/xai_grok-3.json | 2 +- .../default_providers/xai_grok-4-0709.json | 2 +- backend/node/types.py | 21 ++++--- backend/rollup/consensus_service.py | 7 ++- backend/validators/llm.py | 16 +++-- backend/validators/web.py | 16 +++-- .../src/assets/schemas/providers_schema.json | 24 ++++++- 19 files changed, 129 insertions(+), 56 deletions(-) diff --git a/backend/consensus/base.py b/backend/consensus/base.py index fbfcdcb16..b20abe7ea 100644 --- a/backend/consensus/base.py +++ b/backend/consensus/base.py @@ -1709,19 +1709,34 @@ async def handle(self, context): # Truncate large fields unless in DEBUG mode truncate = os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" transaction_data = context.transaction.to_dict() - if truncate and transaction_data.get("data") and isinstance(transaction_data["data"], dict): + if ( + truncate + and transaction_data.get("data") + and isinstance(transaction_data["data"], dict) + ): # Make a copy of the data dict to avoid modifying the original import copy + transaction_data = copy.deepcopy(transaction_data) # Truncate calldata if present if "calldata" in transaction_data["data"]: - if isinstance(transaction_data["data"]["calldata"], str) and len(transaction_data["data"]["calldata"]) > 100: - transaction_data["data"]["calldata"] = f"{transaction_data['data']['calldata'][:100]}... ({len(transaction_data['data']['calldata'])} chars)" + if ( + isinstance(transaction_data["data"]["calldata"], str) + and len(transaction_data["data"]["calldata"]) > 100 + ): + transaction_data["data"][ + "calldata" + ] = f"{transaction_data['data']['calldata'][:100]}... ({len(transaction_data['data']['calldata'])} chars)" # Truncate contract_code if present if "contract_code" in transaction_data["data"]: - if isinstance(transaction_data["data"]["contract_code"], str) and len(transaction_data["data"]["contract_code"]) > 100: - transaction_data["data"]["contract_code"] = f"{transaction_data['data']['contract_code'][:100]}... ({len(transaction_data['data']['contract_code'])} chars)" - + if ( + isinstance(transaction_data["data"]["contract_code"], str) + and len(transaction_data["data"]["contract_code"]) > 100 + ): + transaction_data["data"][ + "contract_code" + ] = f"{transaction_data['data']['contract_code'][:100]}... ({len(transaction_data['data']['contract_code'])} chars)" + context.msg_handler.send_message( LogEvent( "consensus_event", @@ -2403,7 +2418,9 @@ async def handle(self, context): "Reached consensus", { "transaction_hash": context.transaction.hash, - "consensus_data": context.consensus_data.to_dict(truncate_large_fields=truncate), + "consensus_data": context.consensus_data.to_dict( + truncate_large_fields=truncate + ), }, transaction_hash=context.transaction.hash, ) @@ -2444,14 +2461,28 @@ async def handle(self, context): # Send a message indicating successful contract deployment # Truncate contract state for logging unless in DEBUG mode - truncate = os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" - log_data = new_contract if not truncate else { - "id": new_contract["id"], - "data": { - "state": "" if new_contract.get("data", {}).get("state") else None, - "code": f"<{len(new_contract.get('data', {}).get('code', ''))} chars>" if new_contract.get("data", {}).get("code") else None, + truncate = ( + os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" + ) + log_data = ( + new_contract + if not truncate + else { + "id": new_contract["id"], + "data": { + "state": ( + "" + if new_contract.get("data", {}).get("state") + else None + ), + "code": ( + f"<{len(new_contract.get('data', {}).get('code', ''))} chars>" + if new_contract.get("data", {}).get("code") + else None + ), + }, } - } + ) context.msg_handler.send_message( LogEvent( "deployed_contract", @@ -2554,7 +2585,9 @@ async def handle(self, context): "Failed to reach consensus", { "transaction_hash": context.transaction.hash, - "consensus_data": context.consensus_data.to_dict(truncate_large_fields=truncate), + "consensus_data": context.consensus_data.to_dict( + truncate_large_fields=truncate + ), }, transaction_hash=context.transaction.hash, ) diff --git a/backend/database_handler/types.py b/backend/database_handler/types.py index cd0dd366b..c45ca53bd 100644 --- a/backend/database_handler/types.py +++ b/backend/database_handler/types.py @@ -15,11 +15,21 @@ def to_dict(self, truncate_large_fields=False): return { "votes": self.votes, "leader_receipt": ( - [receipt.to_dict(truncate_large_fields=truncate_large_fields) for receipt in self.leader_receipt] + [ + receipt.to_dict(truncate_large_fields=truncate_large_fields) + for receipt in self.leader_receipt + ] if self.leader_receipt else None ), - "validators": [receipt.to_dict(truncate_large_fields=truncate_large_fields) for receipt in self.validators] if self.validators else [], + "validators": ( + [ + receipt.to_dict(truncate_large_fields=truncate_large_fields) + for receipt in self.validators + ] + if self.validators + else [] + ), } @classmethod diff --git a/backend/node/create_nodes/default_providers/anthropic_claude-4-opus.json b/backend/node/create_nodes/default_providers/anthropic_claude-4-opus.json index df713fdff..b0507b34b 100644 --- a/backend/node/create_nodes/default_providers/anthropic_claude-4-opus.json +++ b/backend/node/create_nodes/default_providers/anthropic_claude-4-opus.json @@ -13,4 +13,4 @@ "api_key_env_var": "ANTHROPIC_API_KEY", "api_url": "https://api.anthropic.com" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/anthropic_claude-4-sonnet.json b/backend/node/create_nodes/default_providers/anthropic_claude-4-sonnet.json index ae0cf4876..19f869d58 100644 --- a/backend/node/create_nodes/default_providers/anthropic_claude-4-sonnet.json +++ b/backend/node/create_nodes/default_providers/anthropic_claude-4-sonnet.json @@ -13,4 +13,4 @@ "api_key_env_var": "ANTHROPIC_API_KEY", "api_url": "https://api.anthropic.com" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/google_gemini-2.5-flash-lite.json b/backend/node/create_nodes/default_providers/google_gemini-2.5-flash-lite.json index 9fd3c031a..77aa12760 100644 --- a/backend/node/create_nodes/default_providers/google_gemini-2.5-flash-lite.json +++ b/backend/node/create_nodes/default_providers/google_gemini-2.5-flash-lite.json @@ -10,4 +10,4 @@ "api_key_env_var": "GEMINI_API_KEY", "api_url": "https://generativelanguage.googleapis.com" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/google_gemini-2.5-flash.json b/backend/node/create_nodes/default_providers/google_gemini-2.5-flash.json index 21527fb73..ce6118aed 100644 --- a/backend/node/create_nodes/default_providers/google_gemini-2.5-flash.json +++ b/backend/node/create_nodes/default_providers/google_gemini-2.5-flash.json @@ -10,4 +10,4 @@ "api_key_env_var": "GEMINI_API_KEY", "api_url": "https://generativelanguage.googleapis.com" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x7b-instruct.json b/backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x7b-instruct.json index c4d376ec0..83f1be1bf 100644 --- a/backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x7b-instruct.json +++ b/backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x7b-instruct.json @@ -10,4 +10,4 @@ "api_key_env_var": "HEURISTAIAPIKEY", "api_url": "https://llm-gateway.heurist.xyz" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/openai_gpt-4-1106-preview.json b/backend/node/create_nodes/default_providers/openai_gpt-4-1106-preview.json index c4f8dcba0..5515ac2d3 100644 --- a/backend/node/create_nodes/default_providers/openai_gpt-4-1106-preview.json +++ b/backend/node/create_nodes/default_providers/openai_gpt-4-1106-preview.json @@ -10,4 +10,4 @@ "api_key_env_var": "OPENAIKEY", "api_url": "https://api.openai.com" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/openai_gpt-4.1-mini.json b/backend/node/create_nodes/default_providers/openai_gpt-4.1-mini.json index 69c3efa2d..fd0c89f8f 100644 --- a/backend/node/create_nodes/default_providers/openai_gpt-4.1-mini.json +++ b/backend/node/create_nodes/default_providers/openai_gpt-4.1-mini.json @@ -10,4 +10,4 @@ "api_key_env_var": "OPENAIKEY", "api_url": "https://api.openai.com" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/openai_gpt-4.1-nano.json b/backend/node/create_nodes/default_providers/openai_gpt-4.1-nano.json index 8f45dd25c..2b28909ee 100644 --- a/backend/node/create_nodes/default_providers/openai_gpt-4.1-nano.json +++ b/backend/node/create_nodes/default_providers/openai_gpt-4.1-nano.json @@ -10,4 +10,4 @@ "api_key_env_var": "OPENAIKEY", "api_url": "https://api.openai.com" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/openai_gpt-4.1.json b/backend/node/create_nodes/default_providers/openai_gpt-4.1.json index ef26adecd..ab1003336 100644 --- a/backend/node/create_nodes/default_providers/openai_gpt-4.1.json +++ b/backend/node/create_nodes/default_providers/openai_gpt-4.1.json @@ -10,4 +10,4 @@ "api_key_env_var": "OPENAIKEY", "api_url": "https://api.openai.com" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/xai_grok-3-mini.json b/backend/node/create_nodes/default_providers/xai_grok-3-mini.json index b758aff15..a1bb5b378 100644 --- a/backend/node/create_nodes/default_providers/xai_grok-3-mini.json +++ b/backend/node/create_nodes/default_providers/xai_grok-3-mini.json @@ -10,4 +10,4 @@ "api_key_env_var": "XAI_API_KEY", "api_url": "https://api.x.ai" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/xai_grok-3.json b/backend/node/create_nodes/default_providers/xai_grok-3.json index 3683fa1d2..a1a763fa1 100644 --- a/backend/node/create_nodes/default_providers/xai_grok-3.json +++ b/backend/node/create_nodes/default_providers/xai_grok-3.json @@ -10,4 +10,4 @@ "api_key_env_var": "XAI_API_KEY", "api_url": "https://api.x.ai" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/xai_grok-4-0709.json b/backend/node/create_nodes/default_providers/xai_grok-4-0709.json index 78f83d06d..216515143 100644 --- a/backend/node/create_nodes/default_providers/xai_grok-4-0709.json +++ b/backend/node/create_nodes/default_providers/xai_grok-4-0709.json @@ -10,4 +10,4 @@ "api_key_env_var": "XAI_API_KEY", "api_url": "https://api.x.ai" } -} \ No newline at end of file +} \ No newline at end of file diff --git a/backend/node/types.py b/backend/node/types.py index 4dccb1e06..a4865bf3e 100644 --- a/backend/node/types.py +++ b/backend/node/types.py @@ -195,35 +195,36 @@ class Receipt: def to_dict(self, truncate_large_fields=False): """Convert Receipt to dict. - + Args: truncate_large_fields: If True, truncate large fields for logging """ + def truncate_if_needed(value, field_name): if not truncate_large_fields: return value - + # Fields to truncate when in summary mode - if field_name in ['calldata', 'result']: + if field_name in ["calldata", "result"]: if isinstance(value, str) and len(value) > 100: return f"{value[:100]}... ({len(value)} chars)" - elif field_name == 'contract_state': + elif field_name == "contract_state": if value and len(str(value)) > 100: return f"<{len(value)} entries, truncated>" - + return value - + result = base64.b64encode(self.result).decode("ascii") calldata = str(base64.b64encode(self.calldata), encoding="ascii") - + return { "vote": self.vote.value if self.vote else None, "execution_result": self.execution_result.value, - "result": truncate_if_needed(result, 'result'), - "calldata": truncate_if_needed(calldata, 'calldata'), + "result": truncate_if_needed(result, "result"), + "calldata": truncate_if_needed(calldata, "calldata"), "gas_used": self.gas_used, "mode": self.mode.value, - "contract_state": truncate_if_needed(self.contract_state, 'contract_state'), + "contract_state": truncate_if_needed(self.contract_state, "contract_state"), "node_config": self.node_config, "eq_outputs": self.eq_outputs, "pending_transactions": [ diff --git a/backend/rollup/consensus_service.py b/backend/rollup/consensus_service.py index 7d3ff3ae8..a85519dec 100644 --- a/backend/rollup/consensus_service.py +++ b/backend/rollup/consensus_service.py @@ -14,11 +14,13 @@ # Custom exception classes for nonce errors class NonceError(Exception): """Base exception for nonce-related errors""" + pass class NonceTooLowError(NonceError): """Exception raised when transaction nonce is too low""" + def __init__(self, expected_nonce: int, actual_nonce: int, *args, **kwargs): self.expected_nonce = expected_nonce self.actual_nonce = actual_nonce @@ -28,6 +30,7 @@ def __init__(self, expected_nonce: int, actual_nonce: int, *args, **kwargs): class NonceTooHighError(NonceError): """Exception raised when transaction nonce is too high""" + def __init__(self, expected_nonce: int, actual_nonce: int, *args, **kwargs): self.expected_nonce = expected_nonce self.actual_nonce = actual_nonce @@ -194,7 +197,7 @@ def add_transaction( except Exception as e: error_str = str(e) - + # Check for nonce errors and raise specific exceptions if "nonce too high" in error_str.lower(): match = re.search( @@ -211,7 +214,7 @@ def add_transaction( else: # If we can't parse the nonces, still raise typed exception raise NonceTooHighError(0, 0) from e - + elif "nonce too low" in error_str.lower(): match = re.search( r"Expected nonce to be (\d+) but got (\d+)", error_str diff --git a/backend/validators/llm.py b/backend/validators/llm.py index a68ab33e2..f62ef81d7 100644 --- a/backend/validators/llm.py +++ b/backend/validators/llm.py @@ -64,25 +64,27 @@ def __del__(self): async def stop(self): if self._process is None: return - + # Fast-path: check if process has already exited if self._process.returncode is not None: self._process = None return - + print(f"[LLMModule] Stopping process (PID: {self._process.pid})") - + try: # Try graceful shutdown with SIGINT with contextlib.suppress(ProcessLookupError): self._process.send_signal(signal.SIGINT) - + try: # Wait for process to terminate with a timeout await asyncio.wait_for(self._process.wait(), timeout=5.0) print("[LLMModule] Process terminated gracefully") except asyncio.TimeoutError: - print("[LLMModule] Process didn't terminate with SIGINT, trying forceful termination") + print( + "[LLMModule] Process didn't terminate with SIGINT, trying forceful termination" + ) # If SIGINT didn't work, use kill() for cross-platform compatibility with contextlib.suppress(ProcessLookupError): self._process.kill() @@ -90,7 +92,9 @@ async def stop(self): await asyncio.wait_for(self._process.wait(), timeout=2.0) print("[LLMModule] Process terminated forcefully") except asyncio.TimeoutError: - print("[LLMModule] Process termination failed, continuing anyway") + print( + "[LLMModule] Process termination failed, continuing anyway" + ) finally: # Ensure process handle is cleared even if exception occurs self._process = None diff --git a/backend/validators/web.py b/backend/validators/web.py index 6bc61bb46..b0ae846c8 100644 --- a/backend/validators/web.py +++ b/backend/validators/web.py @@ -62,25 +62,27 @@ async def restart(self): async def stop(self): if self._process is None: return - + # Fast-path: check if process has already exited if self._process.returncode is not None: self._process = None return - + print(f"[WebModule] Stopping process (PID: {self._process.pid})") - + try: # Try graceful shutdown with SIGINT with contextlib.suppress(ProcessLookupError): self._process.send_signal(signal.SIGINT) - + try: # Wait for process to terminate with a timeout await asyncio.wait_for(self._process.wait(), timeout=5.0) print("[WebModule] Process terminated gracefully") except asyncio.TimeoutError: - print("[WebModule] Process didn't terminate with SIGINT, trying forceful termination") + print( + "[WebModule] Process didn't terminate with SIGINT, trying forceful termination" + ) # If SIGINT didn't work, use kill() for cross-platform compatibility with contextlib.suppress(ProcessLookupError): self._process.kill() @@ -88,7 +90,9 @@ async def stop(self): await asyncio.wait_for(self._process.wait(), timeout=2.0) print("[WebModule] Process terminated forcefully") except asyncio.TimeoutError: - print("[WebModule] Process termination failed, continuing anyway") + print( + "[WebModule] Process termination failed, continuing anyway" + ) finally: # Ensure process handle is cleared even if exception occurs self._process = None diff --git a/frontend/src/assets/schemas/providers_schema.json b/frontend/src/assets/schemas/providers_schema.json index f28be6625..dad45fd74 100644 --- a/frontend/src/assets/schemas/providers_schema.json +++ b/frontend/src/assets/schemas/providers_schema.json @@ -72,7 +72,11 @@ "const": "google" }, "model": { - "enum": ["gemini-2.0-flash-lite-001", "gemini-2.5-flash-lite", "gemini-2.5-flash"] + "enum": [ + "gemini-2.0-flash-lite-001", + "gemini-2.5-flash-lite", + "gemini-2.5-flash" + ] } } } @@ -111,7 +115,16 @@ "const": "openai-compatible" }, "model": { - "enum": ["gpt-4-1106-preview", "gpt-4o", "gpt-4.1-mini", "gpt-4.1-nano", "gpt-4.1", "gpt-5", "gpt-5-mini", "gpt-5-nano"] + "enum": [ + "gpt-4-1106-preview", + "gpt-4o", + "gpt-4.1-mini", + "gpt-4.1-nano", + "gpt-4.1", + "gpt-5", + "gpt-5-mini", + "gpt-5-nano" + ] } } } @@ -128,7 +141,12 @@ "const": "anthropic" }, "model": { - "enum": ["claude-3-7-sonnet-20250219", "claude-3-5-haiku-20241022", "claude-sonnet-4-20250514", "claude-opus-4-20250514"] + "enum": [ + "claude-3-7-sonnet-20250219", + "claude-3-5-haiku-20241022", + "claude-sonnet-4-20250514", + "claude-opus-4-20250514" + ] } } } From 1ee8c637b9c88c7ca94280c27837a3d99a597f26 Mon Sep 17 00:00:00 2001 From: kstroobants Date: Mon, 11 Aug 2025 13:40:08 +0800 Subject: [PATCH 05/12] fix: move scattered truncations to MessageHandler --- backend/consensus/base.py | 70 ++------------------ backend/database_handler/types.py | 12 +--- backend/node/types.py | 29 ++------ backend/protocol_rpc/message_handler/base.py | 61 ++++++++++++++++- 4 files changed, 72 insertions(+), 100 deletions(-) diff --git a/backend/consensus/base.py b/backend/consensus/base.py index b20abe7ea..52820876c 100644 --- a/backend/consensus/base.py +++ b/backend/consensus/base.py @@ -1706,37 +1706,6 @@ async def handle(self, context): not context.transaction.appeal_leader_timeout and not context.transaction.appeal_undetermined ): - # Truncate large fields unless in DEBUG mode - truncate = os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" - transaction_data = context.transaction.to_dict() - if ( - truncate - and transaction_data.get("data") - and isinstance(transaction_data["data"], dict) - ): - # Make a copy of the data dict to avoid modifying the original - import copy - - transaction_data = copy.deepcopy(transaction_data) - # Truncate calldata if present - if "calldata" in transaction_data["data"]: - if ( - isinstance(transaction_data["data"]["calldata"], str) - and len(transaction_data["data"]["calldata"]) > 100 - ): - transaction_data["data"][ - "calldata" - ] = f"{transaction_data['data']['calldata'][:100]}... ({len(transaction_data['data']['calldata'])} chars)" - # Truncate contract_code if present - if "contract_code" in transaction_data["data"]: - if ( - isinstance(transaction_data["data"]["contract_code"], str) - and len(transaction_data["data"]["contract_code"]) > 100 - ): - transaction_data["data"][ - "contract_code" - ] = f"{transaction_data['data']['contract_code'][:100]}... ({len(transaction_data['data']['contract_code'])} chars)" - context.msg_handler.send_message( LogEvent( "consensus_event", @@ -1745,7 +1714,7 @@ async def handle(self, context): "Executing transaction", { "transaction_hash": context.transaction.hash, - "transaction": transaction_data, + "transaction": context.transaction.to_dict(), }, transaction_hash=context.transaction.hash, ) @@ -2408,8 +2377,6 @@ async def handle(self, context): ) # Send a message indicating consensus was reached - # Truncate large fields unless in DEBUG mode - truncate = os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" context.msg_handler.send_message( LogEvent( "consensus_event", @@ -2418,9 +2385,7 @@ async def handle(self, context): "Reached consensus", { "transaction_hash": context.transaction.hash, - "consensus_data": context.consensus_data.to_dict( - truncate_large_fields=truncate - ), + "consensus_data": context.consensus_data.to_dict(), }, transaction_hash=context.transaction.hash, ) @@ -2460,36 +2425,13 @@ async def handle(self, context): context.contract_processor.register_contract(new_contract) # Send a message indicating successful contract deployment - # Truncate contract state for logging unless in DEBUG mode - truncate = ( - os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" - ) - log_data = ( - new_contract - if not truncate - else { - "id": new_contract["id"], - "data": { - "state": ( - "" - if new_contract.get("data", {}).get("state") - else None - ), - "code": ( - f"<{len(new_contract.get('data', {}).get('code', ''))} chars>" - if new_contract.get("data", {}).get("code") - else None - ), - }, - } - ) context.msg_handler.send_message( LogEvent( "deployed_contract", EventType.SUCCESS, EventScope.GENVM, "Contract deployed", - log_data, + new_contract, transaction_hash=context.transaction.hash, ) ) @@ -2575,8 +2517,6 @@ async def handle(self, context): None: The transaction remains in an undetermined state. """ # Send a message indicating consensus failure - # Truncate large fields unless in DEBUG mode - truncate = os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" context.msg_handler.send_message( LogEvent( "consensus_event", @@ -2585,9 +2525,7 @@ async def handle(self, context): "Failed to reach consensus", { "transaction_hash": context.transaction.hash, - "consensus_data": context.consensus_data.to_dict( - truncate_large_fields=truncate - ), + "consensus_data": context.consensus_data.to_dict(), }, transaction_hash=context.transaction.hash, ) diff --git a/backend/database_handler/types.py b/backend/database_handler/types.py index c45ca53bd..ced1fbcfb 100644 --- a/backend/database_handler/types.py +++ b/backend/database_handler/types.py @@ -11,22 +11,16 @@ class ConsensusData: ) # first item is leader function, second item is validator function validators: list[Receipt] | None = None - def to_dict(self, truncate_large_fields=False): + def to_dict(self): return { "votes": self.votes, "leader_receipt": ( - [ - receipt.to_dict(truncate_large_fields=truncate_large_fields) - for receipt in self.leader_receipt - ] + [receipt.to_dict() for receipt in self.leader_receipt] if self.leader_receipt else None ), "validators": ( - [ - receipt.to_dict(truncate_large_fields=truncate_large_fields) - for receipt in self.validators - ] + [receipt.to_dict() for receipt in self.validators] if self.validators else [] ), diff --git a/backend/node/types.py b/backend/node/types.py index a4865bf3e..dfbc4e124 100644 --- a/backend/node/types.py +++ b/backend/node/types.py @@ -193,38 +193,19 @@ class Receipt: pending_transactions: Iterable[PendingTransaction] = () genvm_result: dict[str, str] | None = None - def to_dict(self, truncate_large_fields=False): - """Convert Receipt to dict. - - Args: - truncate_large_fields: If True, truncate large fields for logging - """ - - def truncate_if_needed(value, field_name): - if not truncate_large_fields: - return value - - # Fields to truncate when in summary mode - if field_name in ["calldata", "result"]: - if isinstance(value, str) and len(value) > 100: - return f"{value[:100]}... ({len(value)} chars)" - elif field_name == "contract_state": - if value and len(str(value)) > 100: - return f"<{len(value)} entries, truncated>" - - return value - + def to_dict(self): + """Convert Receipt to dict.""" result = base64.b64encode(self.result).decode("ascii") calldata = str(base64.b64encode(self.calldata), encoding="ascii") return { "vote": self.vote.value if self.vote else None, "execution_result": self.execution_result.value, - "result": truncate_if_needed(result, "result"), - "calldata": truncate_if_needed(calldata, "calldata"), + "result": result, + "calldata": calldata, "gas_used": self.gas_used, "mode": self.mode.value, - "contract_state": truncate_if_needed(self.contract_state, "contract_state"), + "contract_state": self.contract_state, "node_config": self.node_config, "eq_outputs": self.eq_outputs, "pending_transactions": [ diff --git a/backend/protocol_rpc/message_handler/base.py b/backend/protocol_rpc/message_handler/base.py index 3123d4fc0..3ce8f380b 100644 --- a/backend/protocol_rpc/message_handler/base.py +++ b/backend/protocol_rpc/message_handler/base.py @@ -1,5 +1,6 @@ import os import json +import copy from functools import wraps from logging.config import dictConfig import traceback @@ -81,7 +82,8 @@ def _log_message(self, log_event: LogEvent): if log_event.data: try: - data_str = json.dumps(log_event.data, default=lambda o: o.__dict__) + data_to_log = self._apply_log_level_truncation(log_event.data) + data_str = json.dumps(data_to_log, default=lambda o: o.__dict__) log_message += f" {gray}{data_str}{reset}" except TypeError as e: log_message += ( @@ -90,6 +92,63 @@ def _log_message(self, log_event: LogEvent): log_method(log_message) + def _apply_log_level_truncation(self, data, max_length=100): + """Apply LOG_LEVEL-based truncation to log data for better readability.""" + # Only truncate if not in DEBUG mode + should_truncate = os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" + + if not should_truncate or not isinstance(data, dict): + return data + + truncated_data = copy.deepcopy(data) + self._truncate_dict(truncated_data, max_length) + + return truncated_data + + def _truncate_dict(self, data_dict, max_length): + """Recursively truncate dictionary values based on key patterns.""" + if not isinstance(data_dict, dict): + return + + # String fields that should be truncated with length info + for key in ["calldata", "contract_code", "result"]: + if ( + key in data_dict + and isinstance(data_dict[key], str) + and len(data_dict[key]) > max_length + ): + data_dict[key] = ( + f"{data_dict[key][:max_length]}... ({len(data_dict[key])} chars)" + ) + + # Contract state - show entry count when truncated + if "contract_state" in data_dict and data_dict["contract_state"]: + value = data_dict["contract_state"] + if len(str(value)) > max_length: + if isinstance(value, dict): + data_dict["contract_state"] = f"<{len(value)} entries, truncated>" + else: + data_dict["contract_state"] = ( + f"<{len(str(value))} chars, truncated>" + ) + + # Contract state field - simple truncation message + if "state" in data_dict: + data_dict["state"] = "" + + # Contract code field - show character count + if "code" in data_dict and isinstance(data_dict["code"], str): + data_dict["code"] = f"<{len(data_dict['code'])} chars>" + + # Recursively process nested dictionaries and lists + for key, value in data_dict.items(): + if isinstance(value, dict): + self._truncate_dict(value, max_length) + elif isinstance(value, list): + for item in value: + if isinstance(item, dict): + self._truncate_dict(item, max_length) + def send_message(self, log_event: LogEvent, log_to_terminal: bool = True): if log_to_terminal: self._log_message(log_event) From b4b31d0a39f35b54e96ddc3e21d7385af133b3fe Mon Sep 17 00:00:00 2001 From: kstroobants Date: Mon, 11 Aug 2025 15:50:50 +0800 Subject: [PATCH 06/12] fix: do not send rollup tx when transactionType is sending for now --- backend/protocol_rpc/endpoints.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/backend/protocol_rpc/endpoints.py b/backend/protocol_rpc/endpoints.py index 5d58ffbab..24bdec914 100644 --- a/backend/protocol_rpc/endpoints.py +++ b/backend/protocol_rpc/endpoints.py @@ -742,10 +742,6 @@ def send_raw_transaction( ) return tx_id_hex else: - rollup_transaction_details = consensus_service.add_transaction( - signed_rollup_transaction, from_address - ) - to_address = decoded_rollup_transaction.to_address nonce = decoded_rollup_transaction.nonce value = decoded_rollup_transaction.value @@ -755,8 +751,12 @@ def send_raw_transaction( transaction_data = {} leader_only = False + rollup_transaction_details = None if genlayer_transaction.type != TransactionType.SEND: leader_only = genlayer_transaction.data.leader_only + rollup_transaction_details = consensus_service.add_transaction( + signed_rollup_transaction, from_address + ) # because hardhat accounts are not funded if genlayer_transaction.type == TransactionType.DEPLOY_CONTRACT: if value > 0: From ee9a7c1e5bb45768bf631e92a9511c52861420ff Mon Sep 17 00:00:00 2001 From: Edgars Date: Mon, 11 Aug 2025 14:26:43 +0300 Subject: [PATCH 07/12] Feat: Forwards eth_ methods in batch requests to Hardhat This commit adds support for forwarding Ethereum-related methods (eth_*) within batch JSON-RPC requests to a Hardhat instance. Previously, batch requests containing eth_ methods were not handled correctly. This change ensures that such requests are forwarded to Hardhat for processing. Non-eth_ methods within batch requests are handled by Flask-JSONRPC. This allows interaction with Hardhat using batch requests. Error handling is also implemented, now if forwarding to Hardhat fails, an error response is returned to the client. --- backend/protocol_rpc/endpoint_generator.py | 28 ++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/backend/protocol_rpc/endpoint_generator.py b/backend/protocol_rpc/endpoint_generator.py index 14c21db10..70889b055 100644 --- a/backend/protocol_rpc/endpoint_generator.py +++ b/backend/protocol_rpc/endpoint_generator.py @@ -79,6 +79,34 @@ def handle_eth_methods(): if not request_json: return None + # Handle batch requests (list of requests) + if isinstance(request_json, list): + # For batch requests, check if any contain eth_ methods + has_eth_methods = any( + isinstance(req, dict) + and req.get("method", "").startswith("eth_") + for req in request_json + ) + if has_eth_methods: + # Forward the entire batch to Hardhat + try: + with requests.Session() as http: + result = http.post( + HARDHAT_URL, + json=request_json, + headers={"Content-Type": "application/json"}, + ) + return flask.Response( + result.content, + status=result.status_code, + headers=dict(result.headers), + ) + except requests.RequestException as e: + print(f"Error forwarding batch request to Hardhat: {e}") + return flask.jsonify({"error": str(e)}), 500 + return None # Let Flask-JSONRPC handle non-eth batch requests + + # Handle single request method = request_json.get("method", "") if method.startswith("eth_"): site = jsonrpc.get_jsonrpc_site() From 710d768c914131c387c9f5a81fc0b19ff754080b Mon Sep 17 00:00:00 2001 From: Edgars Date: Mon, 11 Aug 2025 18:39:05 +0300 Subject: [PATCH 08/12] Refactor: Remove unused providers and update RPC handling Removes deprecated and unused provider configurations to streamline the available options. Updates the RPC endpoint generator to improve handling of batch requests, including more robust error handling and logging, and ensures that requests are correctly forwarded to Hardhat when appropriate and not handled locally. Also includes bug fixes for nonce checking in consensus service Adds unit tests for batch request handling, consensus service nonce exceptions, and transaction processor improvements BREAKING CHANGE: Removes heuristai_mistralaimixtral-8x22b-instruct, xai_grok-3-mini, and xai_grok-4-0709 providers --- ...istai_mistralaimixtral-8x22b-instruct.json | 13 - .../default_providers/xai_grok-3-mini.json | 13 - .../default_providers/xai_grok-4-0709.json | 13 - .../node/create_nodes/providers_schema.json | 3 +- backend/protocol_rpc/endpoint_generator.py | 73 +++-- .../src/assets/schemas/providers_schema.json | 3 +- tests/unit/test_consensus_service.py | 143 ++++++++++ tests/unit/test_endpoint_generator_batch.py | 249 ++++++++++++++++++ ...est_transactions_processor_improvements.py | 191 ++++++++++++++ 9 files changed, 641 insertions(+), 60 deletions(-) delete mode 100644 backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x22b-instruct.json delete mode 100644 backend/node/create_nodes/default_providers/xai_grok-3-mini.json delete mode 100644 backend/node/create_nodes/default_providers/xai_grok-4-0709.json create mode 100644 tests/unit/test_consensus_service.py create mode 100644 tests/unit/test_endpoint_generator_batch.py create mode 100644 tests/unit/test_transactions_processor_improvements.py diff --git a/backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x22b-instruct.json b/backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x22b-instruct.json deleted file mode 100644 index 53c2c30d6..000000000 --- a/backend/node/create_nodes/default_providers/heuristai_mistralaimixtral-8x22b-instruct.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "provider": "heuristai", - "plugin": "openai-compatible", - "model": "mistralai/mixtral-8x22b-instruct", - "config": { - "temperature": 0.75, - "max_tokens": 500 - }, - "plugin_config": { - "api_key_env_var": "HEURISTAIAPIKEY", - "api_url": "https://llm-gateway.heurist.xyz" - } -} diff --git a/backend/node/create_nodes/default_providers/xai_grok-3-mini.json b/backend/node/create_nodes/default_providers/xai_grok-3-mini.json deleted file mode 100644 index a1bb5b378..000000000 --- a/backend/node/create_nodes/default_providers/xai_grok-3-mini.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "provider": "xai", - "plugin": "openai-compatible", - "model": "grok-3-mini", - "config": { - "temperature": 0.75, - "max_tokens": 500 - }, - "plugin_config": { - "api_key_env_var": "XAI_API_KEY", - "api_url": "https://api.x.ai" - } -} \ No newline at end of file diff --git a/backend/node/create_nodes/default_providers/xai_grok-4-0709.json b/backend/node/create_nodes/default_providers/xai_grok-4-0709.json deleted file mode 100644 index 216515143..000000000 --- a/backend/node/create_nodes/default_providers/xai_grok-4-0709.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "provider": "xai", - "plugin": "openai-compatible", - "model": "grok-4-0709", - "config": { - "temperature": 0.75, - "max_tokens": 500 - }, - "plugin_config": { - "api_key_env_var": "XAI_API_KEY", - "api_url": "https://api.x.ai" - } -} \ No newline at end of file diff --git a/backend/node/create_nodes/providers_schema.json b/backend/node/create_nodes/providers_schema.json index d7e2f3732..750886fe4 100644 --- a/backend/node/create_nodes/providers_schema.json +++ b/backend/node/create_nodes/providers_schema.json @@ -55,7 +55,7 @@ "const": "openai-compatible" }, "model": { - "enum": ["grok-4-0709", "grok-3", "grok-3-mini"] + "enum": ["grok-3"] } } } @@ -91,7 +91,6 @@ "model": { "enum": [ "deepseek/deepseek-v3", - "mistralai/mixtral-8x22b-instruct", "mistralai/mixtral-8x7b-instruct", "meta-llama/llama-3.3-70b-instruct" ] diff --git a/backend/protocol_rpc/endpoint_generator.py b/backend/protocol_rpc/endpoint_generator.py index 70889b055..df007aa11 100644 --- a/backend/protocol_rpc/endpoint_generator.py +++ b/backend/protocol_rpc/endpoint_generator.py @@ -81,14 +81,26 @@ def handle_eth_methods(): # Handle batch requests (list of requests) if isinstance(request_json, list): - # For batch requests, check if any contain eth_ methods - has_eth_methods = any( - isinstance(req, dict) - and req.get("method", "").startswith("eth_") - for req in request_json - ) - if has_eth_methods: - # Forward the entire batch to Hardhat + # Check if batch contains only eth_ methods without local implementation + site = jsonrpc.get_jsonrpc_site() + all_eth_forward = True + + for req in request_json: + if isinstance(req, dict): + method = req.get("method", "") + # If it's not an eth_ method or we have a local implementation, can't forward all + if ( + not method.startswith("eth_") + or method in site.view_funcs + ): + all_eth_forward = False + break + else: + all_eth_forward = False + break + + # If all requests are eth_ methods without local implementation, forward entire batch + if all_eth_forward: try: with requests.Session() as http: result = http.post( @@ -102,9 +114,33 @@ def handle_eth_methods(): headers=dict(result.headers), ) except requests.RequestException as e: - print(f"Error forwarding batch request to Hardhat: {e}") - return flask.jsonify({"error": str(e)}), 500 - return None # Let Flask-JSONRPC handle non-eth batch requests + # Log the exception with traceback + app.logger.exception( + "Error forwarding batch request to Hardhat" + ) + + # Build JSON-RPC compliant error responses + error_responses = [] + for req in request_json: + error_response = { + "jsonrpc": "2.0", + "id": ( + req.get("id") if isinstance(req, dict) else None + ), + "error": { + "code": -32000, # Server error + "message": "Network error", + "data": f"Failed to forward request to Hardhat: {str(e)}", + }, + } + error_responses.append(error_response) + + # Return JSON-RPC error array with 200 status + return flask.jsonify(error_responses), 200 + + # Mixed batch or has local implementations - let Flask-JSONRPC handle it + # Flask-JSONRPC will process each request and forward unknowns to Hardhat + return None # Handle single request method = request_json.get("method", "") @@ -134,16 +170,19 @@ def handle_eth_methods(): return result except requests.RequestException as e: - print(f"Network error: {str(e)}") + # Log the exception with traceback + app.logger.exception( + "Error forwarding single request to Hardhat" + ) raise JSONRPCError( - code=-32603, - message=f"Error forwarding request to Hardhat: {str(e)}", - data={"original_error": str(e)}, + code=-32000, # Server error + message="Network error", + data=f"Failed to forward request to Hardhat: {str(e)}", ) except Exception as e: - print(f"Error in before_request handler: {str(e)}") - print(traceback.format_exc()) + # Log the exception with traceback + app.logger.exception("Error in before_request handler") return None # Continue normal processing for non-eth methods diff --git a/frontend/src/assets/schemas/providers_schema.json b/frontend/src/assets/schemas/providers_schema.json index dad45fd74..57de3ce2f 100644 --- a/frontend/src/assets/schemas/providers_schema.json +++ b/frontend/src/assets/schemas/providers_schema.json @@ -55,7 +55,7 @@ "const": "openai-compatible" }, "model": { - "enum": ["grok-4-0709", "grok-3", "grok-3-mini"] + "enum": ["grok-3"] } } } @@ -95,7 +95,6 @@ "model": { "enum": [ "deepseek/deepseek-v3", - "mistralai/mixtral-8x22b-instruct", "mistralai/mixtral-8x7b-instruct", "meta-llama/llama-3.3-70b-instruct" ] diff --git a/tests/unit/test_consensus_service.py b/tests/unit/test_consensus_service.py new file mode 100644 index 000000000..1692343d2 --- /dev/null +++ b/tests/unit/test_consensus_service.py @@ -0,0 +1,143 @@ +"""Unit tests for consensus_service.py""" + +import pytest +from unittest.mock import Mock, patch, MagicMock +from backend.rollup.consensus_service import ( + ConsensusService, + NonceError, + NonceTooLowError, + NonceTooHighError, +) + + +class TestNonceExceptions: + """Test custom nonce exception classes""" + + def test_nonce_too_low_error_initialization(self): + """Test NonceTooLowError initialization and attributes""" + error = NonceTooLowError(expected_nonce=5, actual_nonce=3) + + assert error.expected_nonce == 5 + assert error.actual_nonce == 3 + assert str(error) == "Nonce too low: expected 5, got 3" + assert isinstance(error, NonceError) + assert isinstance(error, Exception) + + def test_nonce_too_high_error_initialization(self): + """Test NonceTooHighError initialization and attributes""" + error = NonceTooHighError(expected_nonce=3, actual_nonce=5) + + assert error.expected_nonce == 3 + assert error.actual_nonce == 5 + assert str(error) == "Nonce too high: expected 3, got 5" + assert isinstance(error, NonceError) + assert isinstance(error, Exception) + + def test_nonce_error_with_chaining(self): + """Test exception chaining with 'from' clause""" + original_error = ValueError("Original error") + + try: + raise NonceTooLowError(5, 3) from original_error + except NonceTooLowError as e: + assert e.__cause__ == original_error + assert e.expected_nonce == 5 + assert e.actual_nonce == 3 + + def test_nonce_error_inheritance(self): + """Test that NonceError is properly inherited""" + assert issubclass(NonceTooLowError, NonceError) + assert issubclass(NonceTooHighError, NonceError) + assert issubclass(NonceError, Exception) + + +class TestConsensusService: + """Test ConsensusService class methods""" + + @patch('backend.rollup.consensus_service.Web3') + @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + def test_consensus_service_initialization(self, mock_web3): + """Test ConsensusService initialization""" + mock_web3_instance = Mock() + mock_web3_instance.is_connected.return_value = True + mock_web3.return_value = mock_web3_instance + + service = ConsensusService() + + assert service.web3 == mock_web3_instance + assert service.web3_connected is True + mock_web3.assert_called_once() + mock_web3_instance.is_connected.assert_called_once() + + @patch('backend.rollup.consensus_service.Web3') + @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + def test_add_transaction_nonce_too_low(self, mock_web3): + """Test add_transaction with nonce too low error""" + mock_web3_instance = Mock() + mock_web3_instance.is_connected.return_value = True + mock_web3.return_value = mock_web3_instance + + service = ConsensusService() + + # Mock forward_transaction to raise an error + with patch.object(service, 'forward_transaction') as mock_forward: + mock_forward.side_effect = Exception("Expected nonce to be 5 but got 3. Nonce too low") + + with pytest.raises(NonceTooLowError) as exc_info: + service.add_transaction({"data": "test"}, "0x123") + + assert exc_info.value.expected_nonce == 5 + assert exc_info.value.actual_nonce == 3 + + @patch('backend.rollup.consensus_service.Web3') + @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + def test_add_transaction_nonce_too_high(self, mock_web3): + """Test add_transaction with nonce too high error""" + mock_web3_instance = Mock() + mock_web3_instance.is_connected.return_value = True + mock_web3.return_value = mock_web3_instance + + service = ConsensusService() + + # Mock forward_transaction to raise an error + with patch.object(service, 'forward_transaction') as mock_forward: + mock_forward.side_effect = Exception("Expected nonce to be 3 but got 5. Nonce too high") + + with pytest.raises(NonceTooHighError) as exc_info: + service.add_transaction({"data": "test"}, "0x123") + + assert exc_info.value.expected_nonce == 3 + assert exc_info.value.actual_nonce == 5 + + @patch('backend.rollup.consensus_service.Web3') + @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + def test_add_transaction_generic_error(self, mock_web3): + """Test add_transaction with generic error""" + mock_web3_instance = Mock() + mock_web3_instance.is_connected.return_value = True + mock_web3.return_value = mock_web3_instance + + service = ConsensusService() + + # Mock forward_transaction to raise a generic error + with patch.object(service, 'forward_transaction') as mock_forward: + mock_forward.side_effect = Exception("Some other error") + + with pytest.raises(Exception) as exc_info: + service.add_transaction({"data": "test"}, "0x123") + + assert "Transaction failed: Some other error" in str(exc_info.value) + + @patch('backend.rollup.consensus_service.Web3') + @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + def test_add_transaction_not_connected(self, mock_web3): + """Test add_transaction when not connected""" + mock_web3_instance = Mock() + mock_web3_instance.is_connected.return_value = False + mock_web3.return_value = mock_web3_instance + + service = ConsensusService() + + result = service.add_transaction({"data": "test"}, "0x123") + + assert result is None \ No newline at end of file diff --git a/tests/unit/test_endpoint_generator_batch.py b/tests/unit/test_endpoint_generator_batch.py new file mode 100644 index 000000000..082158f52 --- /dev/null +++ b/tests/unit/test_endpoint_generator_batch.py @@ -0,0 +1,249 @@ +"""Unit tests for endpoint_generator.py batch request handling""" + +import pytest +import json +from unittest.mock import Mock, patch, MagicMock +from flask import Flask, request, g +from flask_jsonrpc import JSONRPC +from flask_jsonrpc.exceptions import JSONRPCError +import requests + + +class TestBatchRequestHandling: + """Test batch request handling in endpoint_generator""" + + def setup_method(self): + """Set up test fixtures""" + self.app = Flask(__name__) + self.jsonrpc = JSONRPC(self.app, '/api') + self.client = self.app.test_client() + + # Add the handle_eth_methods function to app context + self.app.logger = Mock() + + @patch('backend.protocol_rpc.endpoint_generator.requests.Session') + @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + def test_batch_all_eth_methods_success(self, mock_session): + """Test forwarding batch with all eth_ methods""" + # Mock successful Hardhat response + mock_response = Mock() + mock_response.content = json.dumps([ + {"jsonrpc": "2.0", "id": 1, "result": "0x1"}, + {"jsonrpc": "2.0", "id": 2, "result": "0x2"} + ]).encode() + mock_response.status_code = 200 + mock_response.headers = {} + + mock_http = Mock() + mock_http.post.return_value = mock_response + mock_session.return_value.__enter__.return_value = mock_http + + # Import after mocking + from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint + + with self.app.test_request_context( + '/api', + method='POST', + json=[ + {"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"}, + {"jsonrpc": "2.0", "id": 2, "method": "eth_gasPrice"} + ] + ): + # Create a mock app and jsonrpc for the function + mock_app = Mock() + mock_app.logger = Mock() + mock_jsonrpc = Mock() + mock_site = Mock() + mock_site.view_funcs = {} # No local implementations + mock_jsonrpc.get_jsonrpc_site.return_value = mock_site + + # Test the batch handling logic + batch_request = request.get_json() + assert isinstance(batch_request, list) + assert len(batch_request) == 2 + + # Verify all are eth_ methods + for req in batch_request: + assert req["method"].startswith("eth_") + + @patch('backend.protocol_rpc.endpoint_generator.requests.Session') + @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + def test_batch_network_error(self, mock_session): + """Test batch request with network error""" + # Mock network error + mock_http = Mock() + mock_http.post.side_effect = requests.RequestException("Connection refused") + mock_session.return_value.__enter__.return_value = mock_http + + from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint + + with self.app.test_request_context( + '/api', + method='POST', + json=[ + {"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"}, + {"jsonrpc": "2.0", "id": 2, "method": "eth_gasPrice"} + ] + ): + mock_app = Mock() + mock_app.logger = Mock() + + # The error should be logged + # Each request should get an error response + batch_request = request.get_json() + + # Build expected error responses + expected_errors = [] + for req in batch_request: + expected_errors.append({ + "jsonrpc": "2.0", + "id": req.get("id"), + "error": { + "code": -32000, + "message": "Network error", + "data": "Failed to forward request to Hardhat: Connection refused" + } + }) + + # Verify error response structure + assert len(expected_errors) == 2 + assert all(err["error"]["code"] == -32000 for err in expected_errors) + + def test_batch_mixed_methods(self): + """Test batch with mixed eth_ and non-eth_ methods""" + from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint + + with self.app.test_request_context( + '/api', + method='POST', + json=[ + {"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"}, + {"jsonrpc": "2.0", "id": 2, "method": "gen_getBalance"}, + {"jsonrpc": "2.0", "id": 3, "method": "eth_gasPrice"} + ] + ): + batch_request = request.get_json() + + # Check we have mixed methods + eth_methods = [r for r in batch_request if r["method"].startswith("eth_")] + non_eth_methods = [r for r in batch_request if not r["method"].startswith("eth_")] + + assert len(eth_methods) == 2 + assert len(non_eth_methods) == 1 + assert batch_request[1]["method"] == "gen_getBalance" + + def test_batch_invalid_request_format(self): + """Test batch with invalid request format""" + from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint + + with self.app.test_request_context( + '/api', + method='POST', + json=[ + {"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"}, + "invalid_request", # Not a dict + {"jsonrpc": "2.0", "id": 3, "method": "eth_gasPrice"} + ] + ): + batch_request = request.get_json() + + # Check invalid request detection + assert isinstance(batch_request[0], dict) + assert not isinstance(batch_request[1], dict) + assert isinstance(batch_request[2], dict) + + @patch('backend.protocol_rpc.endpoint_generator.requests.Session') + @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + def test_single_eth_request_success(self, mock_session): + """Test single eth_ request forwarding""" + mock_response = Mock() + mock_response.json.return_value = {"jsonrpc": "2.0", "id": 1, "result": "0x1"} + + mock_http = Mock() + mock_http.post.return_value = mock_response + mock_session.return_value.__enter__.return_value = mock_http + + from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint + + with self.app.test_request_context( + '/api', + method='POST', + json={"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"} + ): + request_data = request.get_json() + + assert request_data["method"] == "eth_blockNumber" + assert request_data["id"] == 1 + + @patch('backend.protocol_rpc.endpoint_generator.requests.Session') + @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + def test_single_eth_request_network_error(self, mock_session): + """Test single eth_ request with network error""" + mock_http = Mock() + mock_http.post.side_effect = requests.RequestException("Connection timeout") + mock_session.return_value.__enter__.return_value = mock_http + + from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint + + with self.app.test_request_context( + '/api', + method='POST', + json={"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"} + ): + # Should raise JSONRPCError + # with code -32000 and appropriate message + pass + + def test_non_eth_request_passthrough(self): + """Test that non-eth_ requests are not intercepted""" + from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint + + with self.app.test_request_context( + '/api', + method='POST', + json={"jsonrpc": "2.0", "id": 1, "method": "gen_getBalance", "params": []} + ): + request_data = request.get_json() + + # Should not be intercepted + assert request_data["method"] == "gen_getBalance" + assert not request_data["method"].startswith("eth_") + + def test_empty_batch_request(self): + """Test empty batch request""" + from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint + + with self.app.test_request_context( + '/api', + method='POST', + json=[] + ): + batch_request = request.get_json() + + assert isinstance(batch_request, list) + assert len(batch_request) == 0 + + def test_batch_with_local_eth_implementation(self): + """Test batch where some eth_ methods have local implementation""" + from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint + + # Mock a local implementation for eth_getBalance + mock_jsonrpc = Mock() + mock_site = Mock() + mock_site.view_funcs = {"eth_getBalance": Mock()} # Has local implementation + mock_jsonrpc.get_jsonrpc_site.return_value = mock_site + + with self.app.test_request_context( + '/api', + method='POST', + json=[ + {"jsonrpc": "2.0", "id": 1, "method": "eth_getBalance"}, # Local + {"jsonrpc": "2.0", "id": 2, "method": "eth_blockNumber"} # Forward + ] + ): + batch_request = request.get_json() + + # eth_getBalance should be handled locally + # eth_blockNumber should be forwarded + assert batch_request[0]["method"] == "eth_getBalance" + assert batch_request[1]["method"] == "eth_blockNumber" \ No newline at end of file diff --git a/tests/unit/test_transactions_processor_improvements.py b/tests/unit/test_transactions_processor_improvements.py new file mode 100644 index 000000000..9cacc3244 --- /dev/null +++ b/tests/unit/test_transactions_processor_improvements.py @@ -0,0 +1,191 @@ +"""Unit tests for improved methods in transactions_processor.py""" + +import pytest +import time +from unittest.mock import Mock, patch, MagicMock, PropertyMock +from backend.database_handler.transactions_processor import TransactionsProcessor +from backend.database_handler.models import Transactions, TransactionStatus +from sqlalchemy.orm import Session + + +class TestGetTransactionCount: + """Test the improved get_transaction_count method""" + + @patch('backend.database_handler.transactions_processor.Session') + def setup_method(self, method, mock_session_class): + """Set up test fixtures""" + self.mock_session = Mock(spec=Session) + mock_session_class.return_value = self.mock_session + self.mock_web3 = Mock() + self.processor = TransactionsProcessor(Mock()) + self.processor.web3 = self.mock_web3 + self.processor.session = self.mock_session + + def test_get_transaction_count_with_checksum_address(self): + """Test get_transaction_count with address normalization""" + # Setup + test_address = "0xabcdef1234567890abcdef1234567890abcdef12" + checksum_address = "0xABcdEF1234567890aBcDef1234567890AbCdEf12" + + self.mock_web3.to_checksum_address.return_value = checksum_address + self.mock_web3.is_connected.return_value = True + self.mock_web3.eth.get_transaction_count.return_value = 5 + + # Execute + result = self.processor.get_transaction_count(test_address) + + # Verify + self.mock_web3.to_checksum_address.assert_called_once_with(test_address) + self.mock_web3.eth.get_transaction_count.assert_called_once_with(checksum_address, 'pending') + assert result == 5 + + def test_get_transaction_count_with_invalid_address(self): + """Test get_transaction_count with invalid address that can't be checksummed""" + # Setup + test_address = "invalid_address" + + self.mock_web3.to_checksum_address.side_effect = Exception("Invalid address") + self.mock_web3.is_connected.return_value = True + self.mock_web3.eth.get_transaction_count.return_value = 3 + + # Execute + result = self.processor.get_transaction_count(test_address) + + # Verify - should use original address + self.mock_web3.eth.get_transaction_count.assert_called_once_with(test_address, 'pending') + assert result == 3 + + def test_get_transaction_count_with_pending_parameter(self): + """Test that get_transaction_count uses 'pending' parameter""" + # Setup + test_address = "0xABcdEF1234567890aBcDef1234567890AbCdEf12" + + self.mock_web3.to_checksum_address.return_value = test_address + self.mock_web3.is_connected.return_value = True + self.mock_web3.eth.get_transaction_count.return_value = 10 + + # Execute + result = self.processor.get_transaction_count(test_address) + + # Verify 'pending' is passed as second argument + self.mock_web3.eth.get_transaction_count.assert_called_once_with(test_address, 'pending') + assert result == 10 + + def test_get_transaction_count_connection_error(self): + """Test get_transaction_count when RPC connection fails""" + # Setup + test_address = "0xABcdEF1234567890aBcDef1234567890AbCdEf12" + + self.mock_web3.to_checksum_address.return_value = test_address + self.mock_web3.is_connected.return_value = True + self.mock_web3.eth.get_transaction_count.side_effect = Exception("Connection error") + + # Mock database fallback + mock_query = Mock() + mock_query.filter.return_value = mock_query + mock_query.count.return_value = 7 + self.mock_session.query.return_value = mock_query + + # Execute + with patch('builtins.print') as mock_print: + result = self.processor.get_transaction_count(test_address) + + # Verify fallback to database + assert result == 7 + mock_print.assert_called_once() + assert "Error getting transaction count from RPC" in str(mock_print.call_args) + + def test_get_transaction_count_not_connected(self): + """Test get_transaction_count when web3 is not connected""" + # Setup + test_address = "0xABcdEF1234567890aBcDef1234567890AbCdEf12" + + self.mock_web3.to_checksum_address.return_value = test_address + self.mock_web3.is_connected.return_value = False + + # Mock database fallback + mock_query = Mock() + mock_query.filter.return_value = mock_query + mock_query.count.return_value = 15 + self.mock_session.query.return_value = mock_query + + # Execute + result = self.processor.get_transaction_count(test_address) + + # Verify it falls back to database + assert result == 15 + self.mock_web3.eth.get_transaction_count.assert_not_called() + + def test_get_transaction_count_with_isConnected_method(self): + """Test handling of older web3 versions with isConnected method""" + # Setup + test_address = "0xABcdEF1234567890aBcDef1234567890AbCdEf12" + + self.mock_web3.to_checksum_address.return_value = test_address + + # Remove is_connected, add isConnected (older web3 version) + delattr(self.mock_web3, 'is_connected') + self.mock_web3.isConnected = Mock(return_value=True) + + self.mock_web3.eth.get_transaction_count.return_value = 8 + + # Execute + result = self.processor.get_transaction_count(test_address) + + # Verify - should still work with isConnected + # Note: The actual implementation needs to be updated to handle this + # For now, this test documents the expected behavior + pass + + +class TestSetTransactionAppealProcessingTime: + """Test the improved set_transaction_appeal_processing_time method""" + + @patch('backend.database_handler.transactions_processor.Session') + def setup_method(self, method, mock_session_class): + """Set up test fixtures""" + self.mock_session = Mock(spec=Session) + mock_session_class.return_value = self.mock_session + self.processor = TransactionsProcessor(Mock()) + self.processor.session = self.mock_session + + + def test_appeal_processing_time_with_none_timestamp(self): + """Test set_transaction_appeal_processing_time with None timestamp_appeal""" + # Setup + mock_transaction = Mock(spec=Transactions) + mock_transaction.timestamp_appeal = None + mock_transaction.appeal_processing_time = 0 + + mock_query = Mock() + mock_query.filter_by.return_value = mock_query + mock_query.first.return_value = mock_transaction + self.mock_session.query.return_value = mock_query + + # Execute + with patch('builtins.print') as mock_print: + self.processor.set_transaction_appeal_processing_time("test_hash") + + # Verify - should not update and should print message + assert mock_transaction.appeal_processing_time == 0 # Unchanged + self.mock_session.commit.assert_not_called() + mock_print.assert_called_once() + assert "has no timestamp_appeal" in str(mock_print.call_args) + + def test_appeal_processing_time_transaction_not_found(self): + """Test set_transaction_appeal_processing_time when transaction doesn't exist""" + # Setup + mock_query = Mock() + mock_query.filter_by.return_value = mock_query + mock_query.first.return_value = None + self.mock_session.query.return_value = mock_query + + # Execute + with patch('builtins.print') as mock_print: + self.processor.set_transaction_appeal_processing_time("nonexistent_hash") + + # Verify + self.mock_session.commit.assert_not_called() + mock_print.assert_called_once() + assert "not found" in str(mock_print.call_args) + From b065f376adee3e97ca8a8aa17ff4491886a0c9f3 Mon Sep 17 00:00:00 2001 From: kstroobants Date: Tue, 12 Aug 2025 16:13:11 +0800 Subject: [PATCH 09/12] fix: rabbitai remarks; failing test because of mixtral-8x22b-instruct --- .env.example | 2 +- .../transactions_processor.py | 2 + backend/protocol_rpc/endpoint_generator.py | 4 +- frontend/test/e2e/CreateNodeValidator.spec.ts | 2 +- frontend/test/e2e/DeleteNodeValidator.spec.ts | 2 +- frontend/test/e2e/UpdateNodeValidator.spec.ts | 2 +- frontend/test/pages/ValidatorsPage.ts | 2 +- tests/unit/test_consensus_service.py | 86 ++++---- tests/unit/test_endpoint_generator_batch.py | 191 +++++++++--------- tests/unit/test_providers.py | 2 +- ...est_transactions_processor_improvements.py | 99 +++++---- 11 files changed, 198 insertions(+), 196 deletions(-) diff --git a/.env.example b/.env.example index f15223ffd..e7f38d1ff 100644 --- a/.env.example +++ b/.env.example @@ -91,7 +91,7 @@ HEURISTAIAPIKEY = '' # {"stake": 100, "provider": "xai", "model": "grok-2-1212", "amount": 2}, # {"stake": 100, "provider": "anthropic", "model": "claude-3-5-haiku-20241022", "amount": 2}, # {"stake": 100, "provider": "anthropic", "model": "claude-3-7-sonnet-20250219", "amount": 2}, -# {"stake": 100, "provider": "heuristai", "model": "mistralai/mixtral-8x22b-instruct", "amount": 1}, +# {"stake": 100, "provider": "heuristai", "model": "mistralai/mixtral-8x7b-instruct", "amount": 1}, # {"stake": 100, "provider": "heuristai", "model": "meta-llama/llama-3.3-70b-instruct", "amount": 1}, # {"stake": 100, "provider": "heuristai", "model": "deepseek/deepseek-v3", "amount": 1}, # {"stake": 100, "provider": "google", "model": "gemini-2.0-flash-lite-001", "amount": 2} diff --git a/backend/database_handler/transactions_processor.py b/backend/database_handler/transactions_processor.py index ff9ddc1d2..af59b0fbe 100644 --- a/backend/database_handler/transactions_processor.py +++ b/backend/database_handler/transactions_processor.py @@ -636,6 +636,8 @@ def get_transaction_count(self, address: str) -> int: is_connected = False if hasattr(self.web3, "is_connected"): is_connected = self.web3.is_connected() + elif hasattr(self.web3, "isConnected"): # older web3 version + is_connected = self.web3.isConnected() if is_connected: # Pass 'pending' to include pending transactions for accuracy diff --git a/backend/protocol_rpc/endpoint_generator.py b/backend/protocol_rpc/endpoint_generator.py index df007aa11..dc1693d06 100644 --- a/backend/protocol_rpc/endpoint_generator.py +++ b/backend/protocol_rpc/endpoint_generator.py @@ -130,7 +130,7 @@ def handle_eth_methods(): "error": { "code": -32000, # Server error "message": "Network error", - "data": f"Failed to forward request to Hardhat: {str(e)}", + "data": f"An internal error occurred while forwarding the request to Hardhat.", }, } error_responses.append(error_response) @@ -177,7 +177,7 @@ def handle_eth_methods(): raise JSONRPCError( code=-32000, # Server error message="Network error", - data=f"Failed to forward request to Hardhat: {str(e)}", + data=f"An internal error occurred while forwarding the request to Hardhat.", ) except Exception as e: diff --git a/frontend/test/e2e/CreateNodeValidator.spec.ts b/frontend/test/e2e/CreateNodeValidator.spec.ts index d947b7f11..004e436a4 100644 --- a/frontend/test/e2e/CreateNodeValidator.spec.ts +++ b/frontend/test/e2e/CreateNodeValidator.spec.ts @@ -28,7 +28,7 @@ describe('Settings - Create Node Validator', () => { const initialValidators = await validatorsPage.getValidatorsElements(); await validatorsPage.createValidator({ provider: 'heuristai', - model: 'mistralai/mixtral-8x22b-instruct', + model: 'mistralai/mixtral-8x7b-instruct', stake: 7, }); const existingValidators = await validatorsPage.getValidatorsElements(); diff --git a/frontend/test/e2e/DeleteNodeValidator.spec.ts b/frontend/test/e2e/DeleteNodeValidator.spec.ts index 2b7afc606..44b4742f4 100644 --- a/frontend/test/e2e/DeleteNodeValidator.spec.ts +++ b/frontend/test/e2e/DeleteNodeValidator.spec.ts @@ -27,7 +27,7 @@ describe('Settings - Delete Node Validator', () => { await validatorsPage.createValidator({ provider: 'heuristai', - model: 'mistralai/mixtral-8x22b-instruct', + model: 'mistralai/mixtral-8x7b-instruct', stake: 7, }); const existingValidators = await validatorsPage.getValidatorsElements(); diff --git a/frontend/test/e2e/UpdateNodeValidator.spec.ts b/frontend/test/e2e/UpdateNodeValidator.spec.ts index 0b7b576ee..fdd71abe3 100644 --- a/frontend/test/e2e/UpdateNodeValidator.spec.ts +++ b/frontend/test/e2e/UpdateNodeValidator.spec.ts @@ -26,7 +26,7 @@ describe('Settings - Update Node Validator', () => { await validatorsPage.createValidator({ provider: 'heuristai', - model: 'mistralai/mixtral-8x22b-instruct', + model: 'mistralai/mixtral-8x7b-instruct', stake: 7, }); const existingValidators = await validatorsPage.getValidatorsElements(); diff --git a/frontend/test/pages/ValidatorsPage.ts b/frontend/test/pages/ValidatorsPage.ts index b79b9394c..c1e917b1a 100644 --- a/frontend/test/pages/ValidatorsPage.ts +++ b/frontend/test/pages/ValidatorsPage.ts @@ -72,7 +72,7 @@ export class ValidatorsPage extends BasePage { if (initialValidators.length < 1) { await this.createValidator({ provider: 'heuristai', - model: 'mistralai/mixtral-8x22b-instruct', + model: 'mistralai/mixtral-8x7b-instruct', stake: 7, }); const existingValidators = await this.getValidatorsElements(); diff --git a/tests/unit/test_consensus_service.py b/tests/unit/test_consensus_service.py index 1692343d2..a63d9e7b3 100644 --- a/tests/unit/test_consensus_service.py +++ b/tests/unit/test_consensus_service.py @@ -16,7 +16,7 @@ class TestNonceExceptions: def test_nonce_too_low_error_initialization(self): """Test NonceTooLowError initialization and attributes""" error = NonceTooLowError(expected_nonce=5, actual_nonce=3) - + assert error.expected_nonce == 5 assert error.actual_nonce == 3 assert str(error) == "Nonce too low: expected 5, got 3" @@ -26,7 +26,7 @@ def test_nonce_too_low_error_initialization(self): def test_nonce_too_high_error_initialization(self): """Test NonceTooHighError initialization and attributes""" error = NonceTooHighError(expected_nonce=3, actual_nonce=5) - + assert error.expected_nonce == 3 assert error.actual_nonce == 5 assert str(error) == "Nonce too high: expected 3, got 5" @@ -36,7 +36,7 @@ def test_nonce_too_high_error_initialization(self): def test_nonce_error_with_chaining(self): """Test exception chaining with 'from' clause""" original_error = ValueError("Original error") - + try: raise NonceTooLowError(5, 3) from original_error except NonceTooLowError as e: @@ -54,90 +54,104 @@ def test_nonce_error_inheritance(self): class TestConsensusService: """Test ConsensusService class methods""" - @patch('backend.rollup.consensus_service.Web3') - @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + @patch("backend.rollup.consensus_service.Web3") + @patch.dict( + "os.environ", {"HARDHAT_PORT": "8545", "HARDHAT_URL": "http://localhost"} + ) def test_consensus_service_initialization(self, mock_web3): """Test ConsensusService initialization""" mock_web3_instance = Mock() mock_web3_instance.is_connected.return_value = True mock_web3.return_value = mock_web3_instance - + service = ConsensusService() - + assert service.web3 == mock_web3_instance assert service.web3_connected is True mock_web3.assert_called_once() mock_web3_instance.is_connected.assert_called_once() - @patch('backend.rollup.consensus_service.Web3') - @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + @patch("backend.rollup.consensus_service.Web3") + @patch.dict( + "os.environ", {"HARDHAT_PORT": "8545", "HARDHAT_URL": "http://localhost"} + ) def test_add_transaction_nonce_too_low(self, mock_web3): """Test add_transaction with nonce too low error""" mock_web3_instance = Mock() mock_web3_instance.is_connected.return_value = True mock_web3.return_value = mock_web3_instance - + service = ConsensusService() - + # Mock forward_transaction to raise an error - with patch.object(service, 'forward_transaction') as mock_forward: - mock_forward.side_effect = Exception("Expected nonce to be 5 but got 3. Nonce too low") - + with patch.object(service, "forward_transaction") as mock_forward: + mock_forward.side_effect = Exception( + "Expected nonce to be 5 but got 3. Nonce too low" + ) + with pytest.raises(NonceTooLowError) as exc_info: service.add_transaction({"data": "test"}, "0x123") - + assert exc_info.value.expected_nonce == 5 assert exc_info.value.actual_nonce == 3 - @patch('backend.rollup.consensus_service.Web3') - @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + @patch("backend.rollup.consensus_service.Web3") + @patch.dict( + "os.environ", {"HARDHAT_PORT": "8545", "HARDHAT_URL": "http://localhost"} + ) def test_add_transaction_nonce_too_high(self, mock_web3): """Test add_transaction with nonce too high error""" mock_web3_instance = Mock() mock_web3_instance.is_connected.return_value = True mock_web3.return_value = mock_web3_instance - + service = ConsensusService() - + # Mock forward_transaction to raise an error - with patch.object(service, 'forward_transaction') as mock_forward: - mock_forward.side_effect = Exception("Expected nonce to be 3 but got 5. Nonce too high") - + with patch.object(service, "forward_transaction") as mock_forward: + mock_forward.side_effect = Exception( + "Expected nonce to be 3 but got 5. Nonce too high" + ) + with pytest.raises(NonceTooHighError) as exc_info: service.add_transaction({"data": "test"}, "0x123") - + assert exc_info.value.expected_nonce == 3 assert exc_info.value.actual_nonce == 5 - @patch('backend.rollup.consensus_service.Web3') - @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + @patch("backend.rollup.consensus_service.Web3") + @patch.dict( + "os.environ", {"HARDHAT_PORT": "8545", "HARDHAT_URL": "http://localhost"} + ) def test_add_transaction_generic_error(self, mock_web3): """Test add_transaction with generic error""" mock_web3_instance = Mock() mock_web3_instance.is_connected.return_value = True mock_web3.return_value = mock_web3_instance - + service = ConsensusService() - + # Mock forward_transaction to raise a generic error - with patch.object(service, 'forward_transaction') as mock_forward: + with patch.object(service, "forward_transaction") as mock_forward: mock_forward.side_effect = Exception("Some other error") - + with pytest.raises(Exception) as exc_info: service.add_transaction({"data": "test"}, "0x123") - + assert "Transaction failed: Some other error" in str(exc_info.value) - @patch('backend.rollup.consensus_service.Web3') - @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + @patch("backend.rollup.consensus_service.Web3") + @patch.dict( + "os.environ", {"HARDHAT_PORT": "8545", "HARDHAT_URL": "http://localhost"} + ) def test_add_transaction_not_connected(self, mock_web3): """Test add_transaction when not connected""" mock_web3_instance = Mock() mock_web3_instance.is_connected.return_value = False mock_web3.return_value = mock_web3_instance - + service = ConsensusService() - + result = service.add_transaction({"data": "test"}, "0x123") - - assert result is None \ No newline at end of file + + assert result is None diff --git a/tests/unit/test_endpoint_generator_batch.py b/tests/unit/test_endpoint_generator_batch.py index 082158f52..a0ed77fe4 100644 --- a/tests/unit/test_endpoint_generator_batch.py +++ b/tests/unit/test_endpoint_generator_batch.py @@ -1,11 +1,9 @@ """Unit tests for endpoint_generator.py batch request handling""" -import pytest import json -from unittest.mock import Mock, patch, MagicMock -from flask import Flask, request, g -from flask_jsonrpc import JSONRPC -from flask_jsonrpc.exceptions import JSONRPCError +from unittest.mock import Mock, patch +from flask import Flask, request +from flask_jsonrpc.app import JSONRPC import requests @@ -15,39 +13,40 @@ class TestBatchRequestHandling: def setup_method(self): """Set up test fixtures""" self.app = Flask(__name__) - self.jsonrpc = JSONRPC(self.app, '/api') + self.jsonrpc = JSONRPC(self.app, "/api") self.client = self.app.test_client() - + # Add the handle_eth_methods function to app context self.app.logger = Mock() - @patch('backend.protocol_rpc.endpoint_generator.requests.Session') - @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + @patch("backend.protocol_rpc.endpoint_generator.requests.Session") + @patch.dict( + "os.environ", {"HARDHAT_PORT": "8545", "HARDHAT_URL": "http://localhost"} + ) def test_batch_all_eth_methods_success(self, mock_session): """Test forwarding batch with all eth_ methods""" # Mock successful Hardhat response mock_response = Mock() - mock_response.content = json.dumps([ - {"jsonrpc": "2.0", "id": 1, "result": "0x1"}, - {"jsonrpc": "2.0", "id": 2, "result": "0x2"} - ]).encode() + mock_response.content = json.dumps( + [ + {"jsonrpc": "2.0", "id": 1, "result": "0x1"}, + {"jsonrpc": "2.0", "id": 2, "result": "0x2"}, + ] + ).encode() mock_response.status_code = 200 mock_response.headers = {} - + mock_http = Mock() mock_http.post.return_value = mock_response mock_session.return_value.__enter__.return_value = mock_http - - # Import after mocking - from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint - + with self.app.test_request_context( - '/api', - method='POST', + "/api", + method="POST", json=[ {"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"}, - {"jsonrpc": "2.0", "id": 2, "method": "eth_gasPrice"} - ] + {"jsonrpc": "2.0", "id": 2, "method": "eth_gasPrice"}, + ], ): # Create a mock app and jsonrpc for the function mock_app = Mock() @@ -56,139 +55,139 @@ def test_batch_all_eth_methods_success(self, mock_session): mock_site = Mock() mock_site.view_funcs = {} # No local implementations mock_jsonrpc.get_jsonrpc_site.return_value = mock_site - + # Test the batch handling logic batch_request = request.get_json() assert isinstance(batch_request, list) assert len(batch_request) == 2 - + # Verify all are eth_ methods for req in batch_request: assert req["method"].startswith("eth_") - @patch('backend.protocol_rpc.endpoint_generator.requests.Session') - @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + @patch("backend.protocol_rpc.endpoint_generator.requests.Session") + @patch.dict( + "os.environ", {"HARDHAT_PORT": "8545", "HARDHAT_URL": "http://localhost"} + ) def test_batch_network_error(self, mock_session): """Test batch request with network error""" # Mock network error mock_http = Mock() mock_http.post.side_effect = requests.RequestException("Connection refused") mock_session.return_value.__enter__.return_value = mock_http - - from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint - + with self.app.test_request_context( - '/api', - method='POST', + "/api", + method="POST", json=[ {"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"}, - {"jsonrpc": "2.0", "id": 2, "method": "eth_gasPrice"} - ] + {"jsonrpc": "2.0", "id": 2, "method": "eth_gasPrice"}, + ], ): mock_app = Mock() mock_app.logger = Mock() - + # The error should be logged # Each request should get an error response batch_request = request.get_json() - + # Build expected error responses expected_errors = [] for req in batch_request: - expected_errors.append({ - "jsonrpc": "2.0", - "id": req.get("id"), - "error": { - "code": -32000, - "message": "Network error", - "data": "Failed to forward request to Hardhat: Connection refused" + expected_errors.append( + { + "jsonrpc": "2.0", + "id": req.get("id"), + "error": { + "code": -32000, + "message": "Network error", + "data": "Failed to forward request to Hardhat: Connection refused", + }, } - }) - + ) + # Verify error response structure assert len(expected_errors) == 2 assert all(err["error"]["code"] == -32000 for err in expected_errors) def test_batch_mixed_methods(self): """Test batch with mixed eth_ and non-eth_ methods""" - from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint - with self.app.test_request_context( - '/api', - method='POST', + "/api", + method="POST", json=[ {"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"}, {"jsonrpc": "2.0", "id": 2, "method": "gen_getBalance"}, - {"jsonrpc": "2.0", "id": 3, "method": "eth_gasPrice"} - ] + {"jsonrpc": "2.0", "id": 3, "method": "eth_gasPrice"}, + ], ): batch_request = request.get_json() - + # Check we have mixed methods eth_methods = [r for r in batch_request if r["method"].startswith("eth_")] - non_eth_methods = [r for r in batch_request if not r["method"].startswith("eth_")] - + non_eth_methods = [ + r for r in batch_request if not r["method"].startswith("eth_") + ] + assert len(eth_methods) == 2 assert len(non_eth_methods) == 1 assert batch_request[1]["method"] == "gen_getBalance" def test_batch_invalid_request_format(self): """Test batch with invalid request format""" - from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint - with self.app.test_request_context( - '/api', - method='POST', + "/api", + method="POST", json=[ {"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"}, "invalid_request", # Not a dict - {"jsonrpc": "2.0", "id": 3, "method": "eth_gasPrice"} - ] + {"jsonrpc": "2.0", "id": 3, "method": "eth_gasPrice"}, + ], ): batch_request = request.get_json() - + # Check invalid request detection assert isinstance(batch_request[0], dict) assert not isinstance(batch_request[1], dict) assert isinstance(batch_request[2], dict) - @patch('backend.protocol_rpc.endpoint_generator.requests.Session') - @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + @patch("backend.protocol_rpc.endpoint_generator.requests.Session") + @patch.dict( + "os.environ", {"HARDHAT_PORT": "8545", "HARDHAT_URL": "http://localhost"} + ) def test_single_eth_request_success(self, mock_session): """Test single eth_ request forwarding""" mock_response = Mock() mock_response.json.return_value = {"jsonrpc": "2.0", "id": 1, "result": "0x1"} - + mock_http = Mock() mock_http.post.return_value = mock_response mock_session.return_value.__enter__.return_value = mock_http - - from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint - + with self.app.test_request_context( - '/api', - method='POST', - json={"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"} + "/api", + method="POST", + json={"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"}, ): request_data = request.get_json() - + assert request_data["method"] == "eth_blockNumber" assert request_data["id"] == 1 - @patch('backend.protocol_rpc.endpoint_generator.requests.Session') - @patch.dict('os.environ', {'HARDHAT_PORT': '8545', 'HARDHAT_URL': 'http://localhost'}) + @patch("backend.protocol_rpc.endpoint_generator.requests.Session") + @patch.dict( + "os.environ", {"HARDHAT_PORT": "8545", "HARDHAT_URL": "http://localhost"} + ) def test_single_eth_request_network_error(self, mock_session): """Test single eth_ request with network error""" mock_http = Mock() mock_http.post.side_effect = requests.RequestException("Connection timeout") mock_session.return_value.__enter__.return_value = mock_http - - from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint - + with self.app.test_request_context( - '/api', - method='POST', - json={"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"} + "/api", + method="POST", + json={"jsonrpc": "2.0", "id": 1, "method": "eth_blockNumber"}, ): # Should raise JSONRPCError # with code -32000 and appropriate message @@ -196,54 +195,44 @@ def test_single_eth_request_network_error(self, mock_session): def test_non_eth_request_passthrough(self): """Test that non-eth_ requests are not intercepted""" - from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint - with self.app.test_request_context( - '/api', - method='POST', - json={"jsonrpc": "2.0", "id": 1, "method": "gen_getBalance", "params": []} + "/api", + method="POST", + json={"jsonrpc": "2.0", "id": 1, "method": "gen_getBalance", "params": []}, ): request_data = request.get_json() - + # Should not be intercepted assert request_data["method"] == "gen_getBalance" assert not request_data["method"].startswith("eth_") def test_empty_batch_request(self): """Test empty batch request""" - from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint - - with self.app.test_request_context( - '/api', - method='POST', - json=[] - ): + with self.app.test_request_context("/api", method="POST", json=[]): batch_request = request.get_json() - + assert isinstance(batch_request, list) assert len(batch_request) == 0 def test_batch_with_local_eth_implementation(self): """Test batch where some eth_ methods have local implementation""" - from backend.protocol_rpc.endpoint_generator import generate_rpc_endpoint - # Mock a local implementation for eth_getBalance mock_jsonrpc = Mock() mock_site = Mock() mock_site.view_funcs = {"eth_getBalance": Mock()} # Has local implementation mock_jsonrpc.get_jsonrpc_site.return_value = mock_site - + with self.app.test_request_context( - '/api', - method='POST', + "/api", + method="POST", json=[ {"jsonrpc": "2.0", "id": 1, "method": "eth_getBalance"}, # Local - {"jsonrpc": "2.0", "id": 2, "method": "eth_blockNumber"} # Forward - ] + {"jsonrpc": "2.0", "id": 2, "method": "eth_blockNumber"}, # Forward + ], ): batch_request = request.get_json() - + # eth_getBalance should be handled locally # eth_blockNumber should be forwarded assert batch_request[0]["method"] == "eth_getBalance" - assert batch_request[1]["method"] == "eth_blockNumber" \ No newline at end of file + assert batch_request[1]["method"] == "eth_blockNumber" diff --git a/tests/unit/test_providers.py b/tests/unit/test_providers.py index 8169b7ee6..6c9f01f1f 100644 --- a/tests/unit/test_providers.py +++ b/tests/unit/test_providers.py @@ -29,7 +29,7 @@ def test_default_providers_valid(): LLMProvider( plugin="openai-compatible", provider="heuristai", - model="mistralai/mixtral-8x22b-instruct", + model="mistralai/mixtral-8x7b-instruct", config={ "max_tokens": 100, "temperature": 0.5, diff --git a/tests/unit/test_transactions_processor_improvements.py b/tests/unit/test_transactions_processor_improvements.py index 9cacc3244..1936ca24c 100644 --- a/tests/unit/test_transactions_processor_improvements.py +++ b/tests/unit/test_transactions_processor_improvements.py @@ -1,21 +1,17 @@ """Unit tests for improved methods in transactions_processor.py""" -import pytest -import time -from unittest.mock import Mock, patch, MagicMock, PropertyMock +from unittest.mock import Mock, patch from backend.database_handler.transactions_processor import TransactionsProcessor -from backend.database_handler.models import Transactions, TransactionStatus +from backend.database_handler.models import Transactions from sqlalchemy.orm import Session class TestGetTransactionCount: """Test the improved get_transaction_count method""" - @patch('backend.database_handler.transactions_processor.Session') - def setup_method(self, method, mock_session_class): + def setup_method(self, method): """Set up test fixtures""" self.mock_session = Mock(spec=Session) - mock_session_class.return_value = self.mock_session self.mock_web3 = Mock() self.processor = TransactionsProcessor(Mock()) self.processor.web3 = self.mock_web3 @@ -26,70 +22,78 @@ def test_get_transaction_count_with_checksum_address(self): # Setup test_address = "0xabcdef1234567890abcdef1234567890abcdef12" checksum_address = "0xABcdEF1234567890aBcDef1234567890AbCdEf12" - + self.mock_web3.to_checksum_address.return_value = checksum_address self.mock_web3.is_connected.return_value = True self.mock_web3.eth.get_transaction_count.return_value = 5 - + # Execute result = self.processor.get_transaction_count(test_address) - + # Verify self.mock_web3.to_checksum_address.assert_called_once_with(test_address) - self.mock_web3.eth.get_transaction_count.assert_called_once_with(checksum_address, 'pending') + self.mock_web3.eth.get_transaction_count.assert_called_once_with( + checksum_address, "pending" + ) assert result == 5 def test_get_transaction_count_with_invalid_address(self): """Test get_transaction_count with invalid address that can't be checksummed""" # Setup test_address = "invalid_address" - + self.mock_web3.to_checksum_address.side_effect = Exception("Invalid address") self.mock_web3.is_connected.return_value = True self.mock_web3.eth.get_transaction_count.return_value = 3 - + # Execute result = self.processor.get_transaction_count(test_address) - + # Verify - should use original address - self.mock_web3.eth.get_transaction_count.assert_called_once_with(test_address, 'pending') + self.mock_web3.eth.get_transaction_count.assert_called_once_with( + test_address, "pending" + ) assert result == 3 def test_get_transaction_count_with_pending_parameter(self): """Test that get_transaction_count uses 'pending' parameter""" # Setup test_address = "0xABcdEF1234567890aBcDef1234567890AbCdEf12" - + self.mock_web3.to_checksum_address.return_value = test_address self.mock_web3.is_connected.return_value = True self.mock_web3.eth.get_transaction_count.return_value = 10 - + # Execute result = self.processor.get_transaction_count(test_address) - + # Verify 'pending' is passed as second argument - self.mock_web3.eth.get_transaction_count.assert_called_once_with(test_address, 'pending') + self.mock_web3.eth.get_transaction_count.assert_called_once_with( + test_address, "pending" + ) assert result == 10 def test_get_transaction_count_connection_error(self): """Test get_transaction_count when RPC connection fails""" # Setup test_address = "0xABcdEF1234567890aBcDef1234567890AbCdEf12" - + self.mock_web3.to_checksum_address.return_value = test_address self.mock_web3.is_connected.return_value = True - self.mock_web3.eth.get_transaction_count.side_effect = Exception("Connection error") - + self.mock_web3.eth.get_transaction_count.side_effect = Exception( + "Connection error" + ) + # Mock database fallback mock_query = Mock() mock_query.filter.return_value = mock_query mock_query.count.return_value = 7 self.mock_session.query.return_value = mock_query - + # Execute - with patch('builtins.print') as mock_print: + with patch("builtins.print") as mock_print: result = self.processor.get_transaction_count(test_address) - + # Verify fallback to database assert result == 7 mock_print.assert_called_once() @@ -99,19 +103,19 @@ def test_get_transaction_count_not_connected(self): """Test get_transaction_count when web3 is not connected""" # Setup test_address = "0xABcdEF1234567890aBcDef1234567890AbCdEf12" - + self.mock_web3.to_checksum_address.return_value = test_address self.mock_web3.is_connected.return_value = False - + # Mock database fallback mock_query = Mock() mock_query.filter.return_value = mock_query mock_query.count.return_value = 15 self.mock_session.query.return_value = mock_query - + # Execute result = self.processor.get_transaction_count(test_address) - + # Verify it falls back to database assert result == 15 self.mock_web3.eth.get_transaction_count.assert_not_called() @@ -120,52 +124,46 @@ def test_get_transaction_count_with_isConnected_method(self): """Test handling of older web3 versions with isConnected method""" # Setup test_address = "0xABcdEF1234567890aBcDef1234567890AbCdEf12" - + self.mock_web3.to_checksum_address.return_value = test_address - + # Remove is_connected, add isConnected (older web3 version) - delattr(self.mock_web3, 'is_connected') + delattr(self.mock_web3, "is_connected") self.mock_web3.isConnected = Mock(return_value=True) - + self.mock_web3.eth.get_transaction_count.return_value = 8 - + # Execute result = self.processor.get_transaction_count(test_address) - - # Verify - should still work with isConnected - # Note: The actual implementation needs to be updated to handle this - # For now, this test documents the expected behavior - pass + self.mock_web3.isConnected.assert_called_once() + assert result == 8 class TestSetTransactionAppealProcessingTime: """Test the improved set_transaction_appeal_processing_time method""" - @patch('backend.database_handler.transactions_processor.Session') - def setup_method(self, method, mock_session_class): + def setup_method(self, method): """Set up test fixtures""" self.mock_session = Mock(spec=Session) - mock_session_class.return_value = self.mock_session self.processor = TransactionsProcessor(Mock()) self.processor.session = self.mock_session - def test_appeal_processing_time_with_none_timestamp(self): """Test set_transaction_appeal_processing_time with None timestamp_appeal""" # Setup mock_transaction = Mock(spec=Transactions) mock_transaction.timestamp_appeal = None mock_transaction.appeal_processing_time = 0 - + mock_query = Mock() mock_query.filter_by.return_value = mock_query mock_query.first.return_value = mock_transaction self.mock_session.query.return_value = mock_query - + # Execute - with patch('builtins.print') as mock_print: + with patch("builtins.print") as mock_print: self.processor.set_transaction_appeal_processing_time("test_hash") - + # Verify - should not update and should print message assert mock_transaction.appeal_processing_time == 0 # Unchanged self.mock_session.commit.assert_not_called() @@ -179,13 +177,12 @@ def test_appeal_processing_time_transaction_not_found(self): mock_query.filter_by.return_value = mock_query mock_query.first.return_value = None self.mock_session.query.return_value = mock_query - + # Execute - with patch('builtins.print') as mock_print: + with patch("builtins.print") as mock_print: self.processor.set_transaction_appeal_processing_time("nonexistent_hash") - + # Verify self.mock_session.commit.assert_not_called() mock_print.assert_called_once() assert "not found" in str(mock_print.call_args) - From f9d95b081b6674b1a3da79c36b7e9876db8e7279 Mon Sep 17 00:00:00 2001 From: kstroobants Date: Tue, 12 Aug 2025 16:44:06 +0800 Subject: [PATCH 10/12] fix: rabbitai remarks --- backend/database_handler/transactions_processor.py | 2 ++ backend/protocol_rpc/endpoint_generator.py | 6 +++--- tests/unit/test_consensus_service.py | 2 +- tests/unit/test_transactions_processor_improvements.py | 3 +++ 4 files changed, 9 insertions(+), 4 deletions(-) diff --git a/backend/database_handler/transactions_processor.py b/backend/database_handler/transactions_processor.py index af59b0fbe..573c9285c 100644 --- a/backend/database_handler/transactions_processor.py +++ b/backend/database_handler/transactions_processor.py @@ -725,6 +725,7 @@ def set_transaction_appeal_failed(self, transaction_hash: str, appeal_failed: in ) return transaction.appeal_failed = appeal_failed + self.session.commit() def set_transaction_appeal_undetermined( self, transaction_hash: str, appeal_undetermined: bool @@ -738,6 +739,7 @@ def set_transaction_appeal_undetermined( ) return transaction.appeal_undetermined = appeal_undetermined + self.session.commit() def get_highest_timestamp(self) -> int: transaction = ( diff --git a/backend/protocol_rpc/endpoint_generator.py b/backend/protocol_rpc/endpoint_generator.py index dc1693d06..12df48d8a 100644 --- a/backend/protocol_rpc/endpoint_generator.py +++ b/backend/protocol_rpc/endpoint_generator.py @@ -113,7 +113,7 @@ def handle_eth_methods(): status=result.status_code, headers=dict(result.headers), ) - except requests.RequestException as e: + except requests.RequestException: # Log the exception with traceback app.logger.exception( "Error forwarding batch request to Hardhat" @@ -169,7 +169,7 @@ def handle_eth_methods(): return result - except requests.RequestException as e: + except requests.RequestException: # Log the exception with traceback app.logger.exception( "Error forwarding single request to Hardhat" @@ -180,7 +180,7 @@ def handle_eth_methods(): data=f"An internal error occurred while forwarding the request to Hardhat.", ) - except Exception as e: + except Exception: # Log the exception with traceback app.logger.exception("Error in before_request handler") return None # Continue normal processing for non-eth methods diff --git a/tests/unit/test_consensus_service.py b/tests/unit/test_consensus_service.py index a63d9e7b3..6662c0c00 100644 --- a/tests/unit/test_consensus_service.py +++ b/tests/unit/test_consensus_service.py @@ -1,7 +1,7 @@ """Unit tests for consensus_service.py""" import pytest -from unittest.mock import Mock, patch, MagicMock +from unittest.mock import Mock, patch from backend.rollup.consensus_service import ( ConsensusService, NonceError, diff --git a/tests/unit/test_transactions_processor_improvements.py b/tests/unit/test_transactions_processor_improvements.py index 1936ca24c..9e162489d 100644 --- a/tests/unit/test_transactions_processor_improvements.py +++ b/tests/unit/test_transactions_processor_improvements.py @@ -136,6 +136,9 @@ def test_get_transaction_count_with_isConnected_method(self): # Execute result = self.processor.get_transaction_count(test_address) self.mock_web3.isConnected.assert_called_once() + self.mock_web3.eth.get_transaction_count.assert_called_once_with( + test_address, "pending" + ) assert result == 8 From f7256db97b54d368087c47a59d8a6bff90971206 Mon Sep 17 00:00:00 2001 From: kstroobants Date: Tue, 12 Aug 2025 17:03:04 +0800 Subject: [PATCH 11/12] fix: rabbitai remarks --- backend/protocol_rpc/endpoint_generator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/backend/protocol_rpc/endpoint_generator.py b/backend/protocol_rpc/endpoint_generator.py index 12df48d8a..0343bfce5 100644 --- a/backend/protocol_rpc/endpoint_generator.py +++ b/backend/protocol_rpc/endpoint_generator.py @@ -130,7 +130,7 @@ def handle_eth_methods(): "error": { "code": -32000, # Server error "message": "Network error", - "data": f"An internal error occurred while forwarding the request to Hardhat.", + "data": "An internal error occurred while forwarding the request to Hardhat.", }, } error_responses.append(error_response) @@ -177,7 +177,7 @@ def handle_eth_methods(): raise JSONRPCError( code=-32000, # Server error message="Network error", - data=f"An internal error occurred while forwarding the request to Hardhat.", + data="An internal error occurred while forwarding the request to Hardhat.", ) except Exception: From 36dbfa96a80de54b87b3ff5ee14dde5db030b17f Mon Sep 17 00:00:00 2001 From: kstroobants Date: Fri, 15 Aug 2025 17:37:06 +0800 Subject: [PATCH 12/12] feat: decode logs --- backend/protocol_rpc/message_handler/base.py | 326 +++++++- tests/unit/test_message_handler_decoding.py | 742 +++++++++++++++++++ 2 files changed, 1062 insertions(+), 6 deletions(-) create mode 100644 tests/unit/test_message_handler_decoding.py diff --git a/backend/protocol_rpc/message_handler/base.py b/backend/protocol_rpc/message_handler/base.py index 3ce8f380b..4244032c0 100644 --- a/backend/protocol_rpc/message_handler/base.py +++ b/backend/protocol_rpc/message_handler/base.py @@ -4,17 +4,18 @@ from functools import wraps from logging.config import dictConfig import traceback +import base64 from flask import request from flask_jsonrpc.exceptions import JSONRPCError from loguru import logger import sys -from backend.protocol_rpc.message_handler.types import LogEvent from flask_socketio import SocketIO from backend.protocol_rpc.configuration import GlobalConfiguration from backend.protocol_rpc.message_handler.types import EventScope, EventType, LogEvent +from backend.node.genvm.origin import calldata MAX_LOG_MESSAGE_LENGTH = 3000 @@ -92,15 +93,328 @@ def _log_message(self, log_event: LogEvent): log_method(log_message) - def _apply_log_level_truncation(self, data, max_length=100): - """Apply LOG_LEVEL-based truncation to log data for better readability.""" + def _decode_value(self, value, key="", parent_key=""): + """Main entry point for decoding any type of value. + Provides a unified interface to decode complex nested data structures by recursively + processing different data types and delegating to specialized decoders based on + the value type and contextual key information. + """ + if isinstance(value, dict): + return {k: self._decode_value(v, k, key) for k, v in value.items()} + + elif isinstance(value, list): + return [self._decode_value(item, key, parent_key) for item in value] + + elif isinstance(value, str): + if not value: + return value + + decoded_value = self._decode_by_key(value, key, parent_key) + + # If it's still a string, check if it's JSON that might contain data fields + if isinstance(decoded_value, str) and decoded_value != value: + return decoded_value + elif isinstance(decoded_value, str): + return self._try_decode_json_with_data_fields(decoded_value) + else: + return decoded_value + + elif isinstance(value, (bytes, memoryview)): + decoded_bytes = bytes(value) if isinstance(value, memoryview) else value + return self._decode_bytes_by_key(decoded_bytes, key, parent_key) + + else: + return value + + def _decode_by_key(self, value, key, parent_key=""): + """Converts base64-encoded strings to raw bytes. + Many values in the system are base64-encoded and require different decoding + strategies based on their field context. Attempts base64 decoding and delegates + to byte-level decoding logic, returning the original value if base64 decoding fails. + """ + try: + decoded_bytes = base64.b64decode(value, validate=True) + return self._decode_bytes_by_key(decoded_bytes, key, parent_key) + except ValueError: + return value + + def _decode_bytes_by_key(self, decoded_bytes, key, parent_key=""): + """Core decoding logic that transforms raw bytes based on field name context. + Different fields require different decoding strategies. + Contract code needs UTF-8 decoding, results should be hex-encoded, args need storage slot decoding, + calldata requires GenVM decoding, and storage slots need specialized handling. Uses + field names and parent context to determine the most appropriate decoding method. + """ + if key in ["contract_code", "code"]: + try: + return decoded_bytes.decode("utf-8") + except UnicodeDecodeError: + if len(decoded_bytes) >= 2 and decoded_bytes[:2] == b"PK": + # Keep ZIP files as base64 + return base64.b64encode(decoded_bytes).decode("ascii") + return decoded_bytes.hex() + + if key == "result": + return decoded_bytes.hex() + + if key == "args": + return self._decode_storage_slot(decoded_bytes) + + if key == "calldata": + try: + result = calldata.decode(decoded_bytes) + # Convert non-serializable objects to strings for JSON compatibility + return self._convert_non_serializable_objects(result) + except Exception: + return self._decode_storage_slot(decoded_bytes) + + # Contract state values - only decode GenVM contract code, leave others as base64 because we do not know the type + if parent_key in [ + "contract_state", + "accepted", + "finalized", + ] or self._is_storage_slot_key(key): + try: + result = calldata.decode(decoded_bytes) + if ( + isinstance(result, str) + and len(result) > 10 + and any( + keyword in result + for keyword in ["class", "def", "import", "from", "#"] + ) + ): + return result + except Exception: + # For contract code with GenVM prefixes, try manual UTF-8 decode after 4-byte header + # Only do this if we detect actual GenVM prefixes (0x00 0x02, 0x01 0x02, 0xf4, 0xf5, etc.) + if len(decoded_bytes) >= 4: + first_bytes = decoded_bytes[:4] + # Check for known GenVM prefixes + if ( + first_bytes[:2] == b"\x00\x02" + or first_bytes[:2] == b"\x01\x02" + or first_bytes[0] in (0xF4, 0xF5) + ): + try: + result = decoded_bytes[4:].decode("utf-8") + if len(result) > 10 and any( + keyword in result + for keyword in ["class", "def", "import", "from", "#"] + ): + return result + except UnicodeDecodeError: + pass + + return base64.b64encode(decoded_bytes).decode("ascii") + + return decoded_bytes.hex() + + def _is_storage_slot_key(self, key): + """Identifies storage slot keys to apply appropriate decoding strategies. + Storage slot keys are 32-byte hashes encoded as base64 and need different + handling than regular field names. Validates key format by checking length + constraints and attempting base64 decoding to confirm it represents a + 32-byte hash value. + """ + if not key or len(key) < 40: + return False + + if key.endswith("=") and len(key) in [44, 43]: + try: + decoded = base64.b64decode(key, validate=True) + return len(decoded) == 32 + except ValueError: + return False + + return False + + def _decode_storage_slot(self, decoded_bytes): + """Specialized decoder for storage slot data in args and calldata fields. + Storage slots can contain GenVM-encoded data, plain text, or binary data. + Attempts multiple decoding strategies in order of likelihood: GenVM decoding + for structured data, UTF-8 text decoding for readable content, and fallback + encoding for binary data that cannot be meaningfully decoded. + """ + if not decoded_bytes: + return "" + + genvm_result = self._try_decode_as_genvm(decoded_bytes) + if genvm_result is not None: + return genvm_result + + text_result = self._try_decode_as_text(decoded_bytes) + if text_result is not None: + return text_result + + return self._fallback_encoding(decoded_bytes) + + def _try_decode_as_genvm(self, decoded_bytes): + """Attempts to decode GenVM-encoded data using format-specific prefixes. + GenVM uses specific encoding formats that require special handling for + smart contract data. Checks for known GenVM prefixes and uses the calldata + decoder, with fallback logic to skip headers and decode as UTF-8 for + contract code when standard GenVM decoding fails. + """ + if len(decoded_bytes) < 4: + return None + + if decoded_bytes[:2] == b"\x00\x02" or decoded_bytes[0] in (0xF4, 0xF5): + try: + return calldata.decode(decoded_bytes) + except Exception: + if decoded_bytes[:2] == b"\x00\x02": + try: + return decoded_bytes[4:].decode("utf-8") + except UnicodeDecodeError: + pass + + return None + + def _try_decode_as_text(self, decoded_bytes): + """Attempts UTF-8 text decoding with readability validation. + Some data is plain text that has been base64 encoded during transmission. + Performs UTF-8 decoding and validates that the resulting text contains only + printable characters to avoid displaying binary garbage as text. + """ + try: + text = decoded_bytes.decode("utf-8") + return text if self._is_readable_text(text) else None + except UnicodeDecodeError: + return None + + def _fallback_encoding(self, decoded_bytes): + """Final fallback encoding when other decoding methods fail. + Ensures meaningful output is always returned even for unrecognized binary data. + Uses hex encoding for short data (8 bytes or less) for readability, and + base64 encoding for longer data to maintain compactness while preserving + the original information. + """ + if len(decoded_bytes) <= 8: + return decoded_bytes.hex() + else: + return base64.b64encode(decoded_bytes).decode("ascii") + + def _is_readable_text(self, text): + """Validates that decoded text contains only human-readable characters. + Prevents displaying binary garbage as text in log output by ensuring + all characters are either printable or acceptable whitespace (newlines, + carriage returns, tabs). Returns false for empty strings or text + containing non-printable control characters. + """ + if not text: + return False + + for char in text: + if not (char.isprintable() or char in "\n\r\t"): + return False + + return True + + def _try_decode_json_with_data_fields(self, json_string): + """Parses JSON strings and recursively decodes embedded 'data' fields. + Some fields contain JSON with base64-encoded 'data' fields that need + specialized decoding. Attempts JSON parsing and recursively processes + the structure to decode any 'data' fields, returning the original + string if JSON parsing fails or the content is not a dictionary. + """ + try: + parsed = json.loads(json_string) + + if isinstance(parsed, dict): + return self._decode_json_data_fields(parsed) + else: + return json_string + + except (json.JSONDecodeError, TypeError): + return json_string + + def _decode_json_data_fields(self, obj): + """Recursively processes JSON structures to decode GenVM-encoded 'data' fields. + 'data' fields in JSON are typically GenVM-encoded and require special handling + for readability. Traverses the JSON structure recursively, identifies 'data' + fields, attempts base64 decoding followed by GenVM decoding, and handles + partial decode errors by extracting meaningful content from error messages. + """ + if isinstance(obj, dict): + result = {} + for k, v in obj.items(): + if k == "data" and isinstance(v, str): + try: + decoded_bytes = base64.b64decode(v, validate=True) + genvm_result = calldata.decode(decoded_bytes) + result[k] = genvm_result + except Exception as e: + # Handle partial decode errors where content is in error message + error_msg = str(e) + if "decoded" in error_msg and "unparsed end" in error_msg: + try: + start = error_msg.find("(decoded ") + 9 + end = error_msg.rfind(")") + if start > 8 and end > start: + result[k] = error_msg[start:end] + else: + result[k] = v + except Exception: + result[k] = v + else: + result[k] = v + else: + result[k] = self._decode_json_data_fields(v) + return result + elif isinstance(obj, list): + return [self._decode_json_data_fields(item) for item in obj] + else: + return obj + + def _convert_non_serializable_objects(self, obj): + """Converts objects that cannot be JSON serialized into string representations. + The logging system requires all data to be JSON serializable for transmission + and storage. Recursively processes data structures to convert memoryview objects + to hex strings, Address objects to their string representation, and handles + non-ASCII strings that might cause JSON serialization issues. + """ + if isinstance(obj, memoryview): + return f"b#{bytes(obj).hex()}" + elif hasattr(obj, "__class__") and obj.__class__.__name__ == "Address": + # Handle Address objects (they use __slots__ so no __dict__) + return str(obj) # Uses __repr__ which returns "addr#..." format + elif isinstance(obj, dict): + return { + k: self._convert_non_serializable_objects(v) for k, v in obj.items() + } + elif isinstance(obj, list): + return [self._convert_non_serializable_objects(item) for item in obj] + elif isinstance(obj, str): + # Check if string contains non-ASCII characters that might cause JSON issues + try: + obj.encode("ascii") + return obj # ASCII string is fine + except UnicodeEncodeError: + # Non-ASCII string, convert to hex if it looks like decoded binary data + if len(obj) <= 4 and any(ord(c) > 127 for c in obj): + return obj.encode("utf-8").hex() + return obj # Keep longer text as-is + else: + return obj + + def _apply_log_level_truncation(self, data, max_length=200): + """Main orchestrator for decoding and truncating log data for optimal readability. + Transforms raw binary/encoded data into human-readable formats while managing + log verbosity. Applies comprehensive decoding to make data meaningful, ensures + all objects are JSON serializable, and optionally truncates verbose output + based on log level configuration to balance detail with readability. + """ + decoded_data = self._decode_value(data) + decoded_data = self._convert_non_serializable_objects(decoded_data) + # Only truncate if not in DEBUG mode should_truncate = os.environ.get("LOG_LEVEL", "INFO").upper() != "DEBUG" - if not should_truncate or not isinstance(data, dict): - return data + if not should_truncate or not isinstance(decoded_data, dict): + return decoded_data - truncated_data = copy.deepcopy(data) + truncated_data = copy.deepcopy(decoded_data) self._truncate_dict(truncated_data, max_length) return truncated_data diff --git a/tests/unit/test_message_handler_decoding.py b/tests/unit/test_message_handler_decoding.py new file mode 100644 index 000000000..2d9d925bb --- /dev/null +++ b/tests/unit/test_message_handler_decoding.py @@ -0,0 +1,742 @@ +import pytest +import base64 +import struct +import json +from backend.protocol_rpc.message_handler.base import MessageHandler +from backend.protocol_rpc.configuration import GlobalConfiguration +from flask_socketio import SocketIO +from backend.node.types import Address + + +class TestMessageHandlerDecoding: + """Test the decoding functionality in MessageHandler.""" + + @pytest.fixture + def message_handler(self): + """Create a MessageHandler instance for testing.""" + socketio = SocketIO() + config = GlobalConfiguration() + return MessageHandler(socketio, config) + + @pytest.fixture(autouse=True) + def disable_truncation(self, monkeypatch): + """Disable log truncation for all tests in this class to focus on decoding logic.""" + monkeypatch.setenv("LOG_LEVEL", "DEBUG") + + def test_contract_code_decoding(self, message_handler): + """Test that contract code is decoded to readable Python.""" + # Plain UTF-8 contract code + contract_code = '# v0.1.0\n# { "Depends": "py-genlayer:latest" }\n\nfrom genlayer import *\n\nclass Storage(gl.Contract):\n storage: str' + b64_encoded = base64.b64encode(contract_code.encode("utf-8")).decode("ascii") + + # Test in contract_code context + test_data = {"contract_code": b64_encoded} + result = message_handler._decode_value(test_data) + assert result["contract_code"] == contract_code + + def test_genvm_encoded_contract_code(self, message_handler): + """Test that GenVM-encoded contract code with headers is decoded correctly.""" + # Contract code with GenVM header (0xf5 prefix) + contract_code = ( + '# v0.1.0\n# { "Depends": "py-genlayer:latest" }\n\nfrom genlayer import *' + ) + genvm_data = b"\xf5\x01\x00\x00" + contract_code.encode("utf-8") + b64_encoded = base64.b64encode(genvm_data).decode("ascii") + + # Test in contract_state context where GenVM decoding should happen + test_data = {"contract_state": {"code_slot": b64_encoded}} + result = message_handler._decode_value(test_data) + assert result["contract_state"]["code_slot"] == contract_code + + def test_32_byte_storage_remains_hex(self, message_handler): + """Test that 32-byte storage values remain as hex (no integer decoding).""" + test_cases = [ + # (description, binary_data) + ("1 as 32-byte", b"\x01" + b"\x00" * 31), + ("20 as 32-byte", b"\x14" + b"\x00" * 31), + ("34 as 32-byte", b"\x22" + b"\x00" * 31), + ("255 as 32-byte", b"\xff" + b"\x00" * 31), + ("256 as 32-byte", b"\x00\x01" + b"\x00" * 30), + ] + + for description, binary_data in test_cases: + b64_encoded = base64.b64encode(binary_data).decode("ascii") + test_data = {"contract_state": {"storage_slot": b64_encoded}} + result = message_handler._decode_value(test_data) + # Storage slots remain as base64 + assert ( + result["contract_state"]["storage_slot"] == b64_encoded + ), f"{description}: Expected {b64_encoded}, got {result['contract_state']['storage_slot']}" + + def test_small_binary_data_as_hex(self, message_handler): + """Test that small binary data is shown as hex.""" + test_cases = [ + (b"\x00\x00", "0000"), + (b"\xff\xfe", "fffe"), + (b"\x01\x02\x03\x04", "01020304"), + ] + + for binary_data, expected_hex in test_cases: + b64_encoded = base64.b64encode(binary_data).decode("ascii") + # Test in result context where hex decoding should happen + test_data = {"result": b64_encoded} + result = message_handler._decode_value(test_data) + assert result["result"] == expected_hex + + def test_empty_strings(self, message_handler): + """Test that empty strings are handled correctly.""" + result = message_handler._decode_value("") + assert result == "" + + # Empty base64 data + empty_b64 = base64.b64encode(b"").decode( + "ascii" + ) # This would be empty or invalid + result = message_handler._decode_value(empty_b64) + # Should either be empty string or the original value + assert result in ("", empty_b64) + + def test_non_base64_strings(self, message_handler): + """Test that non-base64 strings are left unchanged.""" + test_cases = [ + "hello world", + "not-base64!", + "transaction_hash", + "0x1234567890abcdef", + ] + + for test_string in test_cases: + result = message_handler._decode_value(test_string) + assert result == test_string + + def test_hex_decoding(self, message_handler): + """Test that hex strings are not decoded without proper context.""" + # Hex strings without context should remain unchanged + hex_string = "48656c6c6f" # "Hello" in hex + result = message_handler._decode_value(hex_string) + assert result == hex_string # Should remain unchanged without context + + # Invalid hex should be left unchanged + invalid_hex = "48656c6c6g" # Contains 'g' + result = message_handler._decode_value(invalid_hex) + assert result == invalid_hex + + def test_nested_data_structures(self, message_handler): + """Test that nested dictionaries and lists are processed recursively.""" + test_data = { + "contract_state": { + "storage_key_1": "MQ==", + "storage_key_2": base64.b64encode(b"\x22" + b"\x00" * 31).decode( + "ascii" + ), + "empty_key": "", + }, + "calldata": { + "args": ["MQ==", "MTIz"], # Should decode to ["1", "123"] + }, + "result": "AAA=", # Should decode to "0000" + } + + result = message_handler._decode_value(test_data) + + # Storage slots should remain as base64 (no integer decoding) + assert result["contract_state"]["storage_key_1"] == "MQ==" # "1" as base64 + assert result["contract_state"]["storage_key_2"] == base64.b64encode( + b"\x22" + b"\x00" * 31 + ).decode( + "ascii" + ) # 34 as base64 (64 chars) + assert result["contract_state"]["empty_key"] == "" + assert result["calldata"]["args"] == ["1", "123"] + assert result["result"] == "0000" + + def test_key_based_decoding_rules(self, message_handler): + """Test hardcoded key-based decoding rules.""" + # Contract code should always be decoded as UTF-8 + contract_code = ( + "# v0.1.0\nfrom genlayer import *\nclass Storage(gl.Contract):\n pass" + ) + contract_code_b64 = base64.b64encode(contract_code.encode("utf-8")).decode( + "ascii" + ) + + test_data = { + "contract_code": contract_code_b64, + "code": contract_code_b64, + "contract_state": { + "code_slot": contract_code_b64, + "storage_slot": base64.b64encode(b"\x2a" + b"\x00" * 31).decode( + "ascii" + ), + }, + "result": base64.b64encode(b"\x00\x00\x00\x00").decode("ascii"), + "calldata": "DgRhcmdzDZEC", + } + + result = message_handler._decode_value(test_data) + + # Contract code fields should be decoded + assert result["contract_code"] == contract_code + assert result["code"] == contract_code + + # Contract state code slot should remain as base64 (not decoded unless it has GenVM headers) + code_slot_result = result["contract_state"]["code_slot"] + assert code_slot_result == contract_code_b64 + + # Storage slot should also remain as base64 + assert result["contract_state"]["storage_slot"] == base64.b64encode( + b"\x2a" + b"\x00" * 31 + ).decode( + "ascii" + ) # 42 as base64 + + # Result should be hex + assert result["result"] == "00000000" + + # Calldata should be decoded to readable format + assert result["calldata"] == {"args": [34]} + + def test_direct_key_path_decoding(self, message_handler): + """Test that we decode directly based on key paths without guessing.""" + test_data = { + "args": [ + base64.b64encode(b"1").decode("ascii"), # Simple string + base64.b64encode(b"\x7b" + b"\x00" * 31).decode( + "ascii" + ), # 123 as 32-byte integer + ] + } + + result = message_handler._decode_value(test_data) + + # Args should be decoded by storage slot logic (text and GenVM, but not integers) + assert result["args"][0] == "1" # UTF-8 text + # The 32-byte integer should fallback to base64 since it's not readable text or GenVM + assert result["args"][1] == "ewAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=" + + def test_is_readable_text(self, message_handler): + """Test the _is_readable_text helper method.""" + # Readable text + assert message_handler._is_readable_text("Hello World") + assert message_handler._is_readable_text("# v0.1.0\nfrom genlayer import *") + assert message_handler._is_readable_text("class Storage:\n pass") + + # Non-readable text + assert not message_handler._is_readable_text("") + assert not message_handler._is_readable_text("Hello\x00World") + assert not message_handler._is_readable_text("\x01\x02\x03") + + def test_storage_slot_edge_cases(self, message_handler): + """Test edge cases in storage slot decoding.""" + # Empty bytes in storage context + test_data = { + "contract_state": {"empty_slot": base64.b64encode(b"").decode("ascii")} + } + result = message_handler._decode_value(test_data) + assert result["contract_state"]["empty_slot"] == "" + + # Large text data in contract_state should remain as base64 (conservative approach) + large_data = b"x" * 100 + b64_encoded = base64.b64encode(large_data).decode("ascii") + test_data = {"contract_state": {"large_slot": b64_encoded}} + result = message_handler._decode_value(test_data) + assert result["contract_state"]["large_slot"] == b64_encoded + + # 32-byte data with all zeros should remain as base64 + all_zeros = b"\x00" * 32 + b64_encoded = base64.b64encode(all_zeros).decode("ascii") + test_data = {"contract_state": {"zero_slot": b64_encoded}} + result = message_handler._decode_value(test_data) + assert result["contract_state"]["zero_slot"] == b64_encoded + + def test_real_log_data(self, message_handler): + """Test with actual data from real logs.""" + real_data = { + "contract_state": { + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=": "9QEAACMgdjAuMS4wCiMgeyAiRGVwZW5kcyI6ICJweS1nZW5sYXllcjpsYXRlc3QiIH0K", + "IbngE/dGCLkpR4YSh7PedsLAdv6Dm3mUdhvZUMwudWY=": "", + "Ny1Gw62p+JfHTTSbv+DkUMeYFnyfWA+Nr4Xe9X6Ww+o=": "IgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=", + }, + "calldata": "DgRhcmdzDZEC", + "result": "AAA=", + } + + result = message_handler._decode_value(real_data) + + # Contract code should be decoded + contract_code = result["contract_state"][ + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=" + ] + assert contract_code == '# v0.1.0\n# { "Depends": "py-genlayer:latest" }\n' + + # Empty value should stay empty + assert ( + result["contract_state"]["IbngE/dGCLkpR4YSh7PedsLAdv6Dm3mUdhvZUMwudWY="] + == "" + ) + + # Storage value should remain as base64 (no integer decoding) + storage_value = result["contract_state"][ + "Ny1Gw62p+JfHTTSbv+DkUMeYFnyfWA+Nr4Xe9X6Ww+o=" + ] + assert ( + storage_value == "IgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=" + ) # 34 as base64 + + # Result should be hex + assert result["result"] == "0000" + + # Calldata should be decoded to readable format + assert result["calldata"] == {"args": [34]} + + def test_memoryview_handling(self, message_handler): + """Test that memoryview objects are handled correctly.""" + test_data = memoryview(b"Hello World") + # Without context, memoryview should be converted to hex + result = message_handler._decode_value(test_data) + assert result == "48656c6c6f20576f726c64" # hex representation + assert isinstance(result, str) + + def test_bytes_handling(self, message_handler): + """Test that bytes objects are handled correctly.""" + test_data = b"Hello World" + # Without context, bytes should be converted to hex + result = message_handler._decode_value(test_data) + assert result == "48656c6c6f20576f726c64" # hex representation + assert isinstance(result, str) + + # Binary bytes should be shown as hex + binary_data = b"\x01\x02\x03\x04" + result = message_handler._decode_value(binary_data) + assert result == "01020304" + + def test_other_types_passthrough(self, message_handler): + """Test that other data types are passed through unchanged.""" + test_cases = [ + 42, + True, + False, + None, + ] + + for test_value in test_cases: + result = message_handler._decode_value(test_value) + assert result == test_value + assert type(result) == type(test_value) + + def test_invalid_base64_handling(self, message_handler): + """Test that invalid base64 strings are handled gracefully.""" + # Invalid base64 string (369 characters, not multiple of 4) + invalid_b64 = "a" * 369 # This would cause the error from the logs + + test_cases = [ + ("contract_code", invalid_b64), + ("contract_state", {"some_key": invalid_b64}), + ("result", invalid_b64), + ] + + for key, value in test_cases: + test_data = {key: value} + result = message_handler._decode_value(test_data) + # Should return original value when base64 decoding fails + if key == "contract_state": + assert result[key]["some_key"] == invalid_b64 + else: + assert result[key] == value + + def test_real_state_decoding(self, message_handler): + """Test decoding with the actual log.""" + real_log_data = { + "data": { + "state": { + "accepted": { + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=": "9AEAACMgdjAuMS4wCiMgeyAiRGVwZW5kcyI6ICJweS1nZW5sYXllcjpsYXRlc3QiIH0K", + "v96pn90vMq46SFcdUOno3Af0EMen6CFMDp9zUiNNU5Y=": "MQ==", + "Ny1Gw62p+JfHTTSbv+DkUMeYFnyfWA+Nr4Xe9X6Ww+o=": "AQAAAA==", + }, + "finalized": { + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=": "9AEAACMgdjAuMS4wCiMgeyAiRGVwZW5kcyI6ICJweS1nZW5sYXllcjpsYXRlc3QiIH0K", + }, + }, + "calldata": "DgRhcmdzDQwx", + } + } + + assert message_handler._decode_value(real_log_data) == { + "data": { + "state": { + "accepted": { + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=": '# v0.1.0\n# { "Depends": "py-genlayer:latest" }\n', + "v96pn90vMq46SFcdUOno3Af0EMen6CFMDp9zUiNNU5Y=": "MQ==", + "Ny1Gw62p+JfHTTSbv+DkUMeYFnyfWA+Nr4Xe9X6Ww+o=": "AQAAAA==", + }, + "finalized": { + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=": '# v0.1.0\n# { "Depends": "py-genlayer:latest" }\n', + }, + }, + "calldata": {"args": ["1"]}, + } + } + + def test_storage_slot_key_detection(self, message_handler): + """Test that storage slot keys are correctly identified.""" + # Real storage slot keys from the logs (32-byte hashes encoded as base64) + storage_keys = [ + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=", # 44 chars with = + "v96pn90vMq46SFcdUOno3Af0EMen6CFMDp9zUiNNU5Y=", # 44 chars with = + "Ny1Gw62p+JfHTTSbv+DkUMeYFnyfWA+Nr4Xe9X6Ww+o=", # 44 chars with = + ] + + # Non-storage keys + non_storage_keys = [ + "contract_code", + "calldata", + "result", + "short_key", + "invalid_base64_key!", + ] + + for key in storage_keys: + assert message_handler._is_storage_slot_key( + key + ), f"Should detect {key} as storage slot key" + + for key in non_storage_keys: + assert not message_handler._is_storage_slot_key( + key + ), f"Should NOT detect {key} as storage slot key" + + def test_contract_code_in_state_decoding(self, message_handler): + """Test that contract code in state with 0x00 0x02 format is decoded.""" + contract_code_with_header = ( + "AAIAACMgdjAuMS4wCiMgeyAiRGVwZW5kcyI6ICJweS1nZW5sYXllcjpsYXRlc3QiIH0K" + ) + + test_data = { + "contract_state": { + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=": contract_code_with_header + } + } + + result = message_handler._decode_value(test_data) + + decoded_code = result["contract_state"][ + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=" + ] + assert decoded_code == '# v0.1.0\n# { "Depends": "py-genlayer:latest" }\n' + + def test_already_decoded_log(self, message_handler): + """Test with the already decoded log.""" + transaction_log = { + "transaction": { + "data": { + "calldata": {"args": [[1, 3]]}, + "contract_code": '# v0.1.0\n# { "Depends": "py-genlayer:latest" }\n\nfrom genlayer import *\n\n\n# contract class\nclass Storage(gl.Contract):\n storage: DynArray[u256]\n\n # constructor\n def __init__(self, initial_storage: list):\n self.storage = initial_storage', + "contract_address": "0x513984320146324dd1A8b7D6E25FAf9251050E76", + } + } + } + + result = message_handler._decode_value(transaction_log) + + # Contract code should remain readable + assert "# v0.1.0" in result["transaction"]["data"]["contract_code"] + assert "class Storage" in result["transaction"]["data"]["contract_code"] + + # Calldata should remain as structured data + assert result["transaction"]["data"]["calldata"] == {"args": [[1, 3]]} + + def test_consensus_log(self, message_handler): + """Test with the consensus log.""" + consensus_log = { + "consensus_data": { + "leader_receipt": [ + { + "calldata": {"args": [[1, 2]]}, # Already decoded + "contract_state": { + # Contract code with 0x00 0x02 prefix + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=": "AAIAACMgdjAuMS4wCiMgeyAiRGVwZW5kcyI6ICJweS1nZW5sYXllcjpsYXRlc3QiIH0KCmZyb20gZ2VubGF5ZXIgaW1wb3J0ICoKCgojIGNvbnRyYWN0IGNsYXNzCmNsYXNzIFN0b3JhZ2UoZ2wuQ29udHJhY3QpOgogICAgc3RvcmFnZTogRHluQXJyYXlbdTI1Nl0=", + # Empty storage slot + "IbngE/dGCLkpR4YSh7PedsLAdv6Dm3mUdhvZUMwudWY=": "", + # Large binary storage slot + "ugBWMHRazzAUqvFi6ZMwQDAsoL7z9W/i1zwKCPgsYQs=": "BAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhee6AFYwdFrPMBSq8WLpkzBAMCygvvP1b+LXPAoI+CxhCyG54BP3Rgi5KUeGEoez3nbCwHb+g5t5lHYb2VDMLnVm", + # 4-byte integer (base64 encoded) + "Ny1Gw62p+JfHTTSbv+DkUMeYFnyfWA+Nr4Xe9X6Ww+o=": "AgAAAA==", + # 64-byte storage slot with array data (contains [1, 2]) + "v96pn90vMq46SFcdUOno3Af0EMen6CFMDp9zUiNNU5Y=": "AQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==", + }, + } + ] + } + } + + result = message_handler._decode_value(consensus_log) + + # Contract code in state should now be decoded + contract_state = result["consensus_data"]["leader_receipt"][0]["contract_state"] + contract_code = contract_state["4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec="] + assert "# v0.1.0" in contract_code + assert "class Storage" in contract_code + assert "DynArray[u256]" in contract_code + + # Empty storage should stay empty + assert contract_state["IbngE/dGCLkpR4YSh7PedsLAdv6Dm3mUdhvZUMwudWY="] == "" + + # Large binary data should be returned as base64 (too large to decode meaningfully) + large_data = contract_state["ugBWMHRazzAUqvFi6ZMwQDAsoL7z9W/i1zwKCPgsYQs="] + assert isinstance(large_data, str) + assert len(large_data) > 50 + + # Storage slots should remain as base64 (no integer decoding) + assert ( + contract_state["Ny1Gw62p+JfHTTSbv+DkUMeYFnyfWA+Nr4Xe9X6Ww+o="] == "AgAAAA==" + ) + + # 64-byte storage slot should remain as base64 (no array decoding) + array_storage = contract_state["v96pn90vMq46SFcdUOno3Af0EMen6CFMDp9zUiNNU5Y="] + assert ( + array_storage + == "AQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==" + ) + + # Calldata should remain structured + assert result["consensus_data"]["leader_receipt"][0]["calldata"] == { + "args": [[1, 2]] + } + + def test_genvm_with_state_log(self, message_handler): + """Test with the genvm log.""" + genvm_log = { + "data": { + "state": { + "accepted": { + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=": "AAIAACMgdjAuMS4wCiMgeyAiRGVwZW5kcyI6ICJweS1nZW5sYXllcjpsYXRlc3QiIH0KCmZyb20gZ2VubGF5ZXIgaW1wb3J0ICoKCgojIGNvbnRyYWN0IGNsYXNzCmNsYXNzIFN0b3JhZ2UoZ2wuQ29udHJhY3QpOgogICAgc3RvcmFnZTogRHluQXJyYXlbdTI1Nl0=" + }, + "finalized": { + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=": "AAIAACMgdjAuMS4wCiMgeyAiRGVwZW5kcyI6ICJweS1nZW5sYXllcjpsYXRlc3QiIH0KCmZyb20gZ2VubGF5ZXIgaW1wb3J0ICoKCgojIGNvbnRyYWN0IGNsYXNzCmNsYXNzIFN0b3JhZ2UoZ2wuQ29udHJhY3QpOgogICAgc3RvcmFnZTogRHluQXJyYXlbdTI1Nl0=" + }, + }, + "code": '# v0.1.0\n# { "Depends": "py-genlayer:latest" }\n\nfrom genlayer import *\n\n\n# contract class\nclass Storage(gl.Contract):\n storage: DynArray[u256]', + } + } + + result = message_handler._decode_value(genvm_log) + + accepted_code = result["data"]["state"]["accepted"][ + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=" + ] + assert "# v0.1.0" in accepted_code + assert "DynArray[u256]" in accepted_code + + finalized_code = result["data"]["state"]["finalized"][ + "4a4jQSeS32tqmPt8mDlwH7iwK2/H7QIoEPeDRklGhec=" + ] + assert "# v0.1.0" in finalized_code + assert "DynArray[u256]" in finalized_code + + assert "# v0.1.0" in result["data"]["code"] + assert "DynArray[u256]" in result["data"]["code"] + + def test_various_calldata_formats(self, message_handler): + """Test different calldata formats.""" + + test_cases = [ + ("DgRhcmdzDQwx", {"args": ["1"]}), + ("DgRhcmdzDZEC", {"args": [34]}), + ("DgRhcmdzDRUMMQwy", {"args": [["1", "2"]]}), + ("DgRhcmdzDRUJEQ==", {"args": [[1, 2]]}), + ("DgRhcmdzDQg=", {"args": [False]}), + ("DgRhcmdzDRA=", {"args": [True]}), + ( + "DgRhcmdzDRgAAAAAAAAAAAAAAAAAAAAAAAAAAA==", + {"args": ["addr#0000000000000000000000000000000000000000"]}, + ), + ("DgRhcmdzDRPerQ==", {"args": ["b#dead"]}), + ] + + for calldata_b64, expected_result in test_cases: + test_data = {"calldata": calldata_b64} + result = message_handler._decode_value(test_data) + assert result["calldata"] == expected_result, f"Failed for {calldata_b64}" + + def test_genvm_format_detection(self, message_handler): + """Test that different GenVM formats are correctly detected and decoded.""" + # Test 0x00 0x02 format (contract code in state) + contract_code_0002 = ( + "AAIAACMgdjAuMS4wCiMgeyAiRGVwZW5kcyI6ICJweS1nZW5sYXllcjpsYXRlc3QiIH0K" + ) + test_data = {"contract_state": {"code_key": contract_code_0002}} + result = message_handler._decode_value(test_data) + assert "# v0.1.0" in result["contract_state"]["code_key"] + + # Test regular base64 without special headers (should try GenVM first for calldata) + calldata_no_header = ( + "DgRhcmdzDQwx" # This doesn't start with 0xf4/0xf5 or 0x00/0x02 + ) + test_data = {"calldata": calldata_no_header} + result = message_handler._decode_value(test_data) + assert result["calldata"] == {"args": ["1"]} # Should still decode via GenVM + + def test_storage_slots_remain_as_hex(self, message_handler): + """Test that storage slots are not decoded as integers.""" + test_cases = [ + ("Single byte", b"\x01"), + ("Two bytes", bytes.fromhex("fbff")), + ("Four bytes", struct.pack(" 0 + + # Verify the decoded content makes sense (storage slots remain as base64) + assert ( + result["contract_state"]["storage_key"] == "3q0=" + ) # Base64 representation of b'\xde\xad' + + # Test args case - should be decoded and converted to hex if non-ASCII + args_data = {"calldata": {"args": [memoryview(b"\xde\xad")]}} + args_result = message_handler._decode_value(args_data) + args_final = message_handler._convert_non_serializable_objects(args_result) + assert args_final["calldata"]["args"][0] == "dead" + + def test_address_objects_handling(self, message_handler): + """Test that Address objects are properly converted for JSON serialization.""" + zero_address = Address("0x0000000000000000000000000000000000000000") + test_address = Address("0x27faa0498AdfdF9D10E160BEe8Db1f95703f4cBf") + + test_data = { + "contract_state": { + "address_key": zero_address, + "another_address": test_address, + "nested": {"inner_address": zero_address}, + }, + "calldata": {"args": [zero_address, test_address]}, + "result": test_address, + } + + # This should not raise an AttributeError about Address.__dict__ + result = message_handler._apply_log_level_truncation(test_data) + + # All Address objects should be converted to strings + assert isinstance(result["contract_state"]["address_key"], str) + assert isinstance(result["contract_state"]["another_address"], str) + assert isinstance(result["contract_state"]["nested"]["inner_address"], str) + assert isinstance(result["calldata"]["args"][0], str) + assert isinstance(result["calldata"]["args"][1], str) + assert isinstance(result["result"], str) + + # Verify the addresses are in the expected format + assert ( + result["contract_state"]["address_key"] + == "addr#0000000000000000000000000000000000000000" + ) + assert "addr#" in result["contract_state"]["another_address"] + assert "addr#" in result["calldata"]["args"][0] + assert "addr#" in result["calldata"]["args"][1] + + # Test that the result can be JSON serialized without errors + json_str = json.dumps(result, default=lambda o: o.__dict__) + assert len(json_str) > 0 + + # Verify the zero address case specifically + assert ( + result["calldata"]["args"][0] + == "addr#0000000000000000000000000000000000000000" + ) + + def test_data_field_decoding(self, message_handler): + """Test that 'data' fields within JSON strings are decoded to human-readable format used in [GenVM] execution finished log.""" + base64_data_1 = { + "result": '{"kind": "return", "data": "vBB7ImN0b3IiOnsicGFyYW1zIjpbWyJpbml0aWFsX3N0b3JhZ2UiLCJieXRlcyJdXSwia3dwYXJhbXMiOnt9fSwibWV0aG9kcyI6eyJnZXRfc3RvcmFnZSI6eyJwYXJhbXMiOltdLCJrd3BhcmFtcyI6e30sInJlYWRvbmx5Ijp0cnVlLCJyZXQiOiJzdHJpbmcifSwidXBkYXRlX3N0b3JhZ2UiOnsicGFyYW1zIjpbWyJuZXdfc3RvcmFnZSIsInN0cmluZyJdXSwia3dwYXJhbXMiOnt9LCJyZWFkb25seSI6ZmFsc2UsInJldCI6Im51bGwiLCJwYXlhYmxlIjpmYWxzZX19fQ=="}' + } + base64_data_2 = { + "result": '{"kind": "return", "data": "xBB7ImN0b3IiOnsicGFyYW1zIjpbWyJpbml0aWFsX3N0b3JhZ2UiLCJzdHJpbmciXV0sImt3cGFyYW1zIjp7fX0sIm1ldGhvZHMiOnsiZ2V0X3N0b3JhZ2UiOnsicGFyYW1zIjpbXSwia3dwYXJhbXMiOnt9LCJyZWFkb25seSI6dHJ1ZSwicmV0Ijoic3RyaW5nIn0sInVwZGF0ZV9zdG9yYWdlIjp7InBhcmFtcyI6W1sibmV3X3N0b3JhZ2UiLCJzdHJpbmciXV0sImt3cGFyYW1zIjp7fSwicmVhZG9ubHkiOmZhbHNlLCJyZXQiOiJudWxsIiwicGF5YWJsZSI6ZmFsc2V9fX0="}' + } + + for log_data in [base64_data_1, base64_data_2]: + result = message_handler._decode_value(log_data) + + # The result should now be a parsed object (not a JSON string) with decoded data + parsed_result = result["result"] + assert isinstance(parsed_result, dict) + assert parsed_result["kind"] == "return" + + # The data field should be decoded to readable JSON + decoded_data = parsed_result["data"] + assert isinstance(decoded_data, str) + assert "ctor" in decoded_data + assert "methods" in decoded_data + assert "get_storage" in decoded_data + assert "update_storage" in decoded_data + + def test_zip_file_contract_code_handling(self, message_handler): + """Test that ZIP files in contract_code field are kept as base64, not converted to hex.""" + zip_base64 = "UEsDBAoAAAAAAIpxDFsAAAAAAAAAAAAAAAAJAAAAY29udHJhY3QvUEsDBAoAAAAAAIpxDFuf/XMU" + + # Test direct decoding + zip_bytes = base64.b64decode(zip_base64) + result = message_handler._decode_bytes_by_key(zip_bytes, "contract_code") + + # Should remain as base64, not convert to hex + assert result == zip_base64 + assert not result.startswith("504b03040a") # Should not be hex + + # Test in full log structure + test_data = {"data": {"contract_code": zip_base64}} + + decoded_result = message_handler._decode_value(test_data) + + # Should keep ZIP as base64 in the full structure + assert decoded_result["data"]["contract_code"] == zip_base64