From b8ce45470b4a3b6b68126a2a99a66a8f44d6c917 Mon Sep 17 00:00:00 2001 From: Andrey Date: Tue, 18 Oct 2022 16:28:03 +0300 Subject: [PATCH 01/13] Add logging. --- .../mooncrawl/mooncrawl/state_crawler/cli.py | 22 ++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py index b3b70842..7587681e 100644 --- a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py +++ b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py @@ -5,6 +5,7 @@ import itertools import logging from typing import Dict, List, Any, Optional from uuid import UUID +import time from moonstreamdb.blockchain import AvailableBlockchainType from mooncrawl.moonworm_crawler.crawler import _retry_connect_web3 @@ -27,6 +28,9 @@ from .web3_util import FunctionSignature logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) +# Sqlalchemy session +logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO) + Multicall2_address = "0xc8E51042792d7405184DfCa245F2d27B94D013b6" @@ -141,18 +145,30 @@ def crawl_calls_level( calls_of_level[i : i + batch_size] for i in range(0, len(calls_of_level), batch_size) ]: - + retry = 0 while True: try: + logger.info( + f"Calling multicall2 with {len(call_chunk)} calls at block {block_number}" + ) make_multicall_result = make_multicall( multicall_method=multicall_method, calls=call_chunk, block_number=block_number, block_timestamp=block_timestamp, ) + logger.info( + f"Multicall2 returned {len(make_multicall_result)} results at block {block_number}" + ) + retry = 0 break - except ValueError: - continue + except ValueError as e: + time.sleep(3) + logger.info(f"ValueError: {e}, retrying") + retry = +1 + if retry > 5: + raise (e) + raise (e) # results parsing and writing to database add_to_session_count = 0 for result in make_multicall_result: From 94e98a3059e6bdab8c6327deafd4001ca185b777 Mon Sep 17 00:00:00 2001 From: Andrey Date: Thu, 20 Oct 2022 17:40:06 +0300 Subject: [PATCH 02/13] Add logging and thread poolexecutor. 
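Note on this change: the multicall request is now submitted to a single-worker
ThreadPoolExecutor so that a hung JSON-RPC call can be abandoned via
future.result(timeout=...) and retried, instead of blocking the crawler
indefinitely. A minimal sketch of the pattern (function and variable names
below are illustrative, not the exact crawler code):

    from concurrent.futures import ThreadPoolExecutor, TimeoutError

    def call_with_timeout(fn, *args, timeout=20):
        # Run fn in one worker thread; give up after `timeout` seconds.
        with ThreadPoolExecutor(max_workers=1) as executor:
            future = executor.submit(fn, *args)
            return future.result(timeout=timeout)

    # The caller catches TimeoutError (and ValueError raised by the node),
    # sleeps briefly and retries, giving up after a handful of attempts.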
--- .../mooncrawl/mooncrawl/state_crawler/cli.py | 32 ++++++++++++------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py index 7587681e..2680e569 100644 --- a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py +++ b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py @@ -1,4 +1,6 @@ import argparse +from concurrent.futures import ThreadPoolExecutor +from concurrent.futures._base import TimeoutError import json import hashlib import itertools @@ -28,9 +30,6 @@ from .web3_util import FunctionSignature logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) -# Sqlalchemy session -logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO) - Multicall2_address = "0xc8E51042792d7405184DfCa245F2d27B94D013b6" @@ -148,27 +147,36 @@ def crawl_calls_level( retry = 0 while True: try: + logger.info( f"Calling multicall2 with {len(call_chunk)} calls at block {block_number}" ) - make_multicall_result = make_multicall( - multicall_method=multicall_method, - calls=call_chunk, - block_number=block_number, - block_timestamp=block_timestamp, - ) + with ThreadPoolExecutor(max_workers=1) as executor: + future = executor.submit( + make_multicall, + multicall_method, + call_chunk, + block_timestamp, + block_number, + ) + make_multicall_result = future.result(timeout=20) logger.info( f"Multicall2 returned {len(make_multicall_result)} results at block {block_number}" ) retry = 0 break except ValueError as e: - time.sleep(3) logger.info(f"ValueError: {e}, retrying") retry = +1 if retry > 5: raise (e) - raise (e) + except TimeoutError as e: + logger.info(f"TimeoutError: {e}, retrying") + retry = +1 + if retry > 5: + raise (e) + time.sleep(2) + # results parsing and writing to database add_to_session_count = 0 for result in make_multicall_result: @@ -469,7 +477,7 @@ def main() -> None: "--batch-size", "-s", type=int, - default=500, + default=1000, help="Size of chunks wich send to Multicall2 contract.", ) view_state_crawler_parser.set_defaults(func=handle_crawl) From c580a7d3c2cb85ea761dbdaf68d188190eea3ef1 Mon Sep 17 00:00:00 2001 From: Andrey Date: Thu, 20 Oct 2022 21:20:01 +0300 Subject: [PATCH 03/13] Add jobs. 
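For context on the job format added below: each job describes one view method,
and an input's "value" may be a literal, a list of values, or a nested view
call whose results are fanned out into one call per result (totalSupply, for
example, is expanded into token IDs 1..totalSupply). A trimmed illustration of
the shape (addresses shortened, not a real job):

    job = {
        "type": "function",
        "stateMutability": "view",
        "name": "tokenURI",
        "address": "0x...",
        "outputs": [{"internalType": "string", "name": "", "type": "string"}],
        "inputs": [
            {
                "internalType": "uint256",
                "name": "tokenId",
                "type": "uint256",
                # nested call: its results become the tokenId parameters
                "value": {
                    "type": "function",
                    "name": "totalSupply",
                    "inputs": [],
                    "outputs": [
                        {"internalType": "uint256", "name": "", "type": "uint256"}
                    ],
                    "address": "0x...",
                },
            }
        ],
    }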
--- .../mooncrawl/mooncrawl/state_crawler/cli.py | 139 ++++++++++++++---- 1 file changed, 110 insertions(+), 29 deletions(-) diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py index 2680e569..540c5d4e 100644 --- a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py +++ b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py @@ -365,38 +365,119 @@ def handle_crawl(args: argparse.Namespace) -> None: Read all view methods of the contracts and crawl """ - my_job = { - "type": "function", - "stateMutability": "view", - "inputs": [ - { - "internalType": "uint256", - "name": "tokenId", - "type": "uint256", - "value": { - "type": "function", - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256", - } - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - "inputs": [], - }, - } - ], - "name": "tokenURI", - "outputs": [{"internalType": "string", "name": "", "type": "string"}], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - } + my_jobs = [ + { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256", + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + "inputs": [], + }, + } + ], + "name": "tokenURI", + "outputs": [{"internalType": "string", "name": "", "type": "string"}], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256", + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + "inputs": [], + }, + } + ], + "name": "getDNA", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_dna", + "type": "uint256", + "value": { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256", + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + "inputs": [], + }, + } + ], + "name": "getDNA", + "outputs": [ + {"internalType": "uint256", "name": "", "type": "uint256"} + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + } + ], + "name": "getStats", + "outputs": [ + {"internalType": "uint256", "name": "attack", "type": "uint256"}, + {"internalType": "uint256", "name": "accuracy", "type": "uint256"}, + {"internalType": "uint256", "name": "movementSpeed", "type": "uint256"}, + {"internalType": "uint256", "name": "attackSpeed", "type": "uint256"}, + {"internalType": "uint256", "name": "defense", "type": "uint256"}, + {"internalType": "uint256", "name": "vitality", "type": "uint256"}, + {"internalType": "uint256", "name": "resistance", "type": "uint256"}, + {"internalType": "uint256", "name": "magic", "type": "uint256"}, + ], + "stateMutability": "view", + "type": "function", + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + ] 
blockchain_type = AvailableBlockchainType(args.blockchain) parse_jobs( - [my_job], blockchain_type, args.block_number, args.batch_size, args.access_id + my_jobs, blockchain_type, args.block_number, args.batch_size, args.access_id ) @@ -477,7 +558,7 @@ def main() -> None: "--batch-size", "-s", type=int, - default=1000, + default=500, help="Size of chunks wich send to Multicall2 contract.", ) view_state_crawler_parser.set_defaults(func=handle_crawl) From 15126c508669a3a0b80f56469552bb8d63041cf2 Mon Sep 17 00:00:00 2001 From: Andrey Date: Wed, 26 Oct 2022 14:48:30 +0300 Subject: [PATCH 04/13] Add autoscale batch size. --- backend/moonstreamapi/api.py | 1 + .../mooncrawl/mooncrawl/state_crawler/cli.py | 192 ++++++++++++------ 2 files changed, 132 insertions(+), 61 deletions(-) diff --git a/backend/moonstreamapi/api.py b/backend/moonstreamapi/api.py index 0ba1fcee..d165c416 100644 --- a/backend/moonstreamapi/api.py +++ b/backend/moonstreamapi/api.py @@ -33,6 +33,7 @@ tags_metadata = [ }, {"name": "dashboards", "description": "Operations with user dashboards."}, {"name": "queries", "description": "Operations with user queries."}, + {"name": "public", "description": "Operations with public endpoints."}, {"name": "streams", "description": "Operations with data streams and filters."}, {"name": "subscriptions", "description": "Operations with user subscriptions."}, {"name": "time", "description": "Server timestamp endpoints."}, diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py index 540c5d4e..b787c36b 100644 --- a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py +++ b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py @@ -41,13 +41,31 @@ def make_multicall( block_number: str = "latest", ) -> Any: - multicall_calls = [ - ( - call["address"], - call["method"].encode_data(call["inputs"]).hex(), - ) - for call in calls - ] + # multicall_calls = [ + # ( + # call["address"], + # call["method"].encode_data(call["inputs"]).hex(), + # ) + # for call in calls + # ] + + multicall_calls = [] + + # Remove! 
+ logger.info(f"multicall for methods {set([call['method'].name for call in calls])}") + + for call in calls: + try: + multicall_calls.append( + ( + call["address"], + call["method"].encode_data(call["inputs"]).hex(), + ) + ) + except Exception as e: + logger.error( + f'Error encoding data for method {call["method"].name} call: {call}' + ) multicall_result = multicall_method(False, calls=multicall_calls).call( block_identifier=block_number @@ -57,10 +75,41 @@ def make_multicall( # Handle the case with not successful calls for index, encoded_data in enumerate(multicall_result): - if encoded_data[0]: + try: + if encoded_data[0]: + results.append( + { + "result": calls[index]["method"].decode_data(encoded_data[1]), + "hash": calls[index]["hash"], + "method": calls[index]["method"], + "address": calls[index]["address"], + "name": calls[index]["method"].name, + "inputs": calls[index]["inputs"], + "call_data": multicall_calls[index][1], + "block_number": block_number, + "block_timestamp": block_timestamp, + "status": encoded_data[0], + } + ) + else: + results.append( + { + "result": calls[index]["method"].decode_data(encoded_data[1]), + "hash": calls[index]["hash"], + "method": calls[index]["method"], + "address": calls[index]["address"], + "name": calls[index]["method"].name, + "inputs": calls[index]["inputs"], + "call_data": multicall_calls[index][1], + "block_number": block_number, + "block_timestamp": block_timestamp, + "status": encoded_data[0], + } + ) + except Exception as e: results.append( { - "result": calls[index]["method"].decode_data(encoded_data[1])[0], + "result": str(encoded_data[1]), "hash": calls[index]["hash"], "method": calls[index]["method"], "address": calls[index]["address"], @@ -70,23 +119,16 @@ def make_multicall( "block_number": block_number, "block_timestamp": block_timestamp, "status": encoded_data[0], + "error": str(e), } ) - else: - results.append( - { - "result": calls[index]["method"].decode_data(encoded_data[1]), - "hash": calls[index]["hash"], - "method": calls[index]["method"], - "address": calls[index]["address"], - "name": calls[index]["method"].name, - "inputs": calls[index]["inputs"], - "call_data": multicall_calls[index][1], - "block_number": block_number, - "block_timestamp": block_timestamp, - "status": encoded_data[0], - } + + logger.error( + f"Error decoding data for for method {call['method'].name} call {calls[index]}: {e}." 
) + # data is not decoded, return the encoded data + logger.error(f"Encoded data: {encoded_data}") + return results @@ -101,9 +143,12 @@ def crawl_calls_level( block_number, blockchain_type, block_timestamp, + max_batch_size=5000, + min_batch_size=4, ): calls_of_level = [] + make_multicall_result = [] for call in calls: parameters = [] @@ -118,8 +163,9 @@ def crawl_calls_level( contracts_ABIs[call["address"]][input["value"]]["name"] == "totalSupply" ): + print(responces[input["value"]][0]) parameters.append( - list(range(1, responces[input["value"]][0] + 1)) + list(range(1, responces[input["value"]][0][0] + 1)) ) else: parameters.append(responces[input["value"]]) @@ -140,43 +186,54 @@ def crawl_calls_level( } ) - for call_chunk in [ - calls_of_level[i : i + batch_size] - for i in range(0, len(calls_of_level), batch_size) - ]: - retry = 0 - while True: - try: + # for call_chunk in [ + # calls_of_level[i : i + batch_size] + # for i in range(0, len(calls_of_level), batch_size) + # ]: + retry = 0 - logger.info( - f"Calling multicall2 with {len(call_chunk)} calls at block {block_number}" - ) - with ThreadPoolExecutor(max_workers=1) as executor: - future = executor.submit( - make_multicall, - multicall_method, - call_chunk, - block_timestamp, - block_number, - ) - make_multicall_result = future.result(timeout=20) - logger.info( - f"Multicall2 returned {len(make_multicall_result)} results at block {block_number}" - ) - retry = 0 - break - except ValueError as e: - logger.info(f"ValueError: {e}, retrying") - retry = +1 - if retry > 5: - raise (e) - except TimeoutError as e: - logger.info(f"TimeoutError: {e}, retrying") - retry = +1 - if retry > 5: - raise (e) - time.sleep(2) + while len(calls_of_level) > 0: + try: + call_chunk = calls_of_level[:batch_size] + + logger.info( + f"Calling multicall2 with {len(call_chunk)} calls at block {block_number}" + ) + with ThreadPoolExecutor(max_workers=1) as executor: + future = executor.submit( + make_multicall, + multicall_method, + call_chunk, + block_timestamp, + block_number, + ) + make_multicall_result = future.result(timeout=20) + logger.info( + f"Multicall2 returned {len(make_multicall_result)} results at block {block_number}" + ) + retry = 0 + batch_size = min(batch_size * 2, max_batch_size) + calls_of_level = calls_of_level[batch_size:] + except ValueError as e: + logger.info(f"ValueError: {e}, retrying") + retry += 1 + if "missing trie node" in str(e): + time.sleep(20) + if retry > 5: + raise (e) + batch_size = max(batch_size // 3, min_batch_size) + except TimeoutError as e: + logger.info(f"TimeoutError: {e}, retrying") + retry += 1 + if retry > 5: + raise (e) + batch_size = max(batch_size // 3, min_batch_size) + except Exception as e: + logger.info(f"Exception: {e}") + raise (e) + time.sleep(2) + print(f"retry: {retry}") # results parsing and writing to database add_to_session_count = 0 for result in make_multicall_result: @@ -190,6 +247,9 @@ def crawl_calls_level( responces[result["hash"]].append(result["result"]) commit_session(db_session) logger.info(f"{add_to_session_count} labels commit to database.") + make_multicall_result = [] + + return batch_size def parse_jobs( @@ -322,7 +382,7 @@ def parse_jobs( logger.info("Crawl level: 0") logger.info(f"Jobs amount: {len(calls[0])}") - crawl_calls_level( + batch_size = crawl_calls_level( db_session, calls[0], responces, @@ -340,7 +400,7 @@ def parse_jobs( logger.info(f"Crawl level: {level}") logger.info(f"Jobs amount: {len(calls[level])}") - crawl_calls_level( + batch_size = crawl_calls_level( 
db_session, calls[level], responces, @@ -602,3 +662,13 @@ def main() -> None: if __name__ == "__main__": main() + + +( + False, + b"\x08\xc3y\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 Date: Wed, 9 Nov 2022 19:47:04 +0200 Subject: [PATCH 05/13] Add fixes for parameters. Remove recrawling from different level already existing data. Current state have issues from node. Timeouts and unsinchronize. --- .../mooncrawl/mooncrawl/state_crawler/cli.py | 406 +++++++++++++++++- crawlers/mooncrawl/setup.py | 2 +- 2 files changed, 388 insertions(+), 20 deletions(-) diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py index b787c36b..62c2bff0 100644 --- a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py +++ b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py @@ -8,6 +8,7 @@ import logging from typing import Dict, List, Any, Optional from uuid import UUID import time +from pprint import pprint from moonstreamdb.blockchain import AvailableBlockchainType from mooncrawl.moonworm_crawler.crawler import _retry_connect_web3 @@ -16,8 +17,10 @@ from moonstreamdb.db import ( MOONSTREAM_POOL_SIZE, create_moonstream_engine, ) +import requests from sqlalchemy.orm import sessionmaker +from web3._utils.request import cache_session from .db import view_call_to_label, commit_session, clean_labels from .Multicall2_interface import Contract as Multicall2 from ..settings import ( @@ -133,6 +136,7 @@ def make_multicall( def crawl_calls_level( + web3_client, db_session, calls, responces, @@ -148,11 +152,15 @@ def crawl_calls_level( ): calls_of_level = [] - make_multicall_result = [] for call in calls: + + if call["generated_hash"] in responces: + continue parameters = [] + logger.info(f"Call: {call}") + for input in call["inputs"]: if type(input["value"]) in (str, int): @@ -167,14 +175,21 @@ def crawl_calls_level( parameters.append( list(range(1, responces[input["value"]][0][0] + 1)) ) + # parameters.append(list(range(40000, 46000))) else: parameters.append(responces[input["value"]]) + # if call["name"] == "getStats": + # pprint(responces[input["value"]]) elif type(input["value"]) == list: parameters.append(input["value"]) else: raise for call_parameters in itertools.product(*parameters): + + # hack for tuples product + if len(call_parameters) == 1 and type(call_parameters[0]) == tuple: + call_parameters = call_parameters[0] calls_of_level.append( { "address": call["address"], @@ -186,13 +201,12 @@ def crawl_calls_level( } ) - # for call_chunk in [ - # calls_of_level[i : i + batch_size] - # for i in range(0, len(calls_of_level), batch_size) - # ]: retry = 0 + print(dir(web3_client)) while len(calls_of_level) > 0: + + make_multicall_result = [] try: call_chunk = calls_of_level[:batch_size] @@ -213,13 +227,15 @@ def crawl_calls_level( f"Multicall2 returned {len(make_multicall_result)} results at block {block_number}" ) retry = 0 - batch_size = min(batch_size * 2, max_batch_size) calls_of_level = calls_of_level[batch_size:] + logger.info(f"lenght of task left {len(calls_of_level)}.") + batch_size = min(batch_size * 2, max_batch_size) except ValueError as e: logger.info(f"ValueError: {e}, retrying") retry += 1 if "missing trie node" in str(e): - time.sleep(20) + cache_session(web3_client.HTTPProvider.endpoint_uri, requests.Session()) + time.sleep(4) if retry > 
5: raise (e) batch_size = max(batch_size // 3, min_batch_size) @@ -247,7 +263,6 @@ def crawl_calls_level( responces[result["hash"]].append(result["result"]) commit_session(db_session) logger.info(f"{add_to_session_count} labels commit to database.") - make_multicall_result = [] return batch_size @@ -313,7 +328,11 @@ def parse_jobs( have_subcalls = True abi["inputs"].append(input) abi["address"] = method_abi["address"] - generated_hash = hashlib.md5(json.dumps(abi).encode("utf-8")).hexdigest() + generated_hash = hashlib.md5( + json.dumps(abi, sort_keys=True, indent=4, separators=(",", ": ")).encode( + "utf-8" + ) + ).hexdigest() abi["generated_hash"] = generated_hash if have_subcalls: @@ -345,6 +364,8 @@ def parse_jobs( recursive_unpack(job, 0) + pprint(calls) + # generate contracts interfaces interfaces = {} @@ -381,8 +402,10 @@ def parse_jobs( # initial call of level 0 all call without subcalls directly moved there logger.info("Crawl level: 0") logger.info(f"Jobs amount: {len(calls[0])}") + logger.info(f"call_tree_levels: {call_tree_levels}") batch_size = crawl_calls_level( + web3_client, db_session, calls[0], responces, @@ -401,6 +424,7 @@ def parse_jobs( logger.info(f"Jobs amount: {len(calls[level])}") batch_size = crawl_calls_level( + web3_client, db_session, calls[level], responces, @@ -480,6 +504,58 @@ def handle_crawl(args: argparse.Namespace) -> None: "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", }, + # { + # "inputs": [ + # { + # "internalType": "uint256", + # "name": "_dna", + # "type": "uint256", + # "value": { + # "type": "function", + # "stateMutability": "view", + # "inputs": [ + # { + # "internalType": "uint256", + # "name": "tokenId", + # "type": "uint256", + # "value": { + # "type": "function", + # "name": "totalSupply", + # "outputs": [ + # { + # "internalType": "uint256", + # "name": "", + # "type": "uint256", + # } + # ], + # "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + # "inputs": [], + # }, + # } + # ], + # "name": "getDNA", + # "outputs": [ + # {"internalType": "uint256", "name": "", "type": "uint256"} + # ], + # "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + # }, + # } + # ], + # "name": "getStats", + # "outputs": [ + # {"internalType": "uint256", "name": "attack", "type": "uint256"}, + # {"internalType": "uint256", "name": "accuracy", "type": "uint256"}, + # {"internalType": "uint256", "name": "movementSpeed", "type": "uint256"}, + # {"internalType": "uint256", "name": "attackSpeed", "type": "uint256"}, + # {"internalType": "uint256", "name": "defense", "type": "uint256"}, + # {"internalType": "uint256", "name": "vitality", "type": "uint256"}, + # {"internalType": "uint256", "name": "resistance", "type": "uint256"}, + # {"internalType": "uint256", "name": "magic", "type": "uint256"}, + # ], + # "stateMutability": "view", + # "type": "function", + # "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + # }, { "inputs": [ { @@ -517,17 +593,309 @@ def handle_crawl(args: argparse.Namespace) -> None: }, } ], - "name": "getStats", - "outputs": [ - {"internalType": "uint256", "name": "attack", "type": "uint256"}, - {"internalType": "uint256", "name": "accuracy", "type": "uint256"}, - {"internalType": "uint256", "name": "movementSpeed", "type": "uint256"}, - {"internalType": "uint256", "name": "attackSpeed", "type": "uint256"}, - {"internalType": "uint256", "name": "defense", "type": "uint256"}, - {"internalType": "uint256", "name": "vitality", "type": 
"uint256"}, - {"internalType": "uint256", "name": "resistance", "type": "uint256"}, - {"internalType": "uint256", "name": "magic", "type": "uint256"}, + "name": "getAttack", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_dna", + "type": "uint256", + "value": { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256", + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + "inputs": [], + }, + } + ], + "name": "getDNA", + "outputs": [ + {"internalType": "uint256", "name": "", "type": "uint256"} + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + } ], + "name": "getAccuracy", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_dna", + "type": "uint256", + "value": { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256", + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + "inputs": [], + }, + } + ], + "name": "getDNA", + "outputs": [ + {"internalType": "uint256", "name": "", "type": "uint256"} + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + } + ], + "name": "getMovementSpeed", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_dna", + "type": "uint256", + "value": { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256", + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + "inputs": [], + }, + } + ], + "name": "getDNA", + "outputs": [ + {"internalType": "uint256", "name": "", "type": "uint256"} + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + } + ], + "name": "getAttackSpeed", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_dna", + "type": "uint256", + "value": { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256", + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + "inputs": [], + }, + } + ], + "name": "getDNA", + "outputs": [ + {"internalType": "uint256", "name": "", 
"type": "uint256"} + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + } + ], + "name": "getDefense", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_dna", + "type": "uint256", + "value": { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256", + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + "inputs": [], + }, + } + ], + "name": "getDNA", + "outputs": [ + {"internalType": "uint256", "name": "", "type": "uint256"} + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + } + ], + "name": "getVitality", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_dna", + "type": "uint256", + "value": { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256", + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + "inputs": [], + }, + } + ], + "name": "getDNA", + "outputs": [ + {"internalType": "uint256", "name": "", "type": "uint256"} + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + } + ], + "name": "getResistance", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], + "stateMutability": "view", + "type": "function", + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "_dna", + "type": "uint256", + "value": { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256", + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + "inputs": [], + }, + } + ], + "name": "getDNA", + "outputs": [ + {"internalType": "uint256", "name": "", "type": "uint256"} + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + }, + } + ], + "name": "getMagic", + "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function", "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", diff --git a/crawlers/mooncrawl/setup.py b/crawlers/mooncrawl/setup.py index f6282b7c..9c80be25 100644 --- a/crawlers/mooncrawl/setup.py +++ b/crawlers/mooncrawl/setup.py @@ -40,7 +40,7 @@ setup( "moonstreamdb>=0.3.2", "moonworm[moonstream]>=0.5.2", "humbug", - "pydantic", + "pydantic==1.9.2", "python-dateutil", "requests", "tqdm", From a4dd79a663e1c43fe500c1e39412688625029900 Mon Sep 17 00:00:00 2001 From: Andrey Date: Thu, 15 Dec 2022 15:45:41 +0200 Subject: [PATCH 06/13] Add read tasks from json file and custom web3 provider. 
--- .../mooncrawl/mooncrawl/state_crawler/cli.py | 541 ++---------------- 1 file changed, 52 insertions(+), 489 deletions(-) diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py index 62c2bff0..c79704b0 100644 --- a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py +++ b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py @@ -19,8 +19,10 @@ from moonstreamdb.db import ( ) import requests from sqlalchemy.orm import sessionmaker - from web3._utils.request import cache_session +from web3 import Web3, HTTPProvider +from web3.middleware import geth_poa_middleware + from .db import view_call_to_label, commit_session, clean_labels from .Multicall2_interface import Contract as Multicall2 from ..settings import ( @@ -34,9 +36,6 @@ logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) -Multicall2_address = "0xc8E51042792d7405184DfCa245F2d27B94D013b6" - - def make_multicall( multicall_method: Any, calls: List[Any], @@ -44,14 +43,6 @@ def make_multicall( block_number: str = "latest", ) -> Any: - # multicall_calls = [ - # ( - # call["address"], - # call["method"].encode_data(call["inputs"]).hex(), - # ) - # for call in calls - # ] - multicall_calls = [] # Remove! @@ -159,7 +150,7 @@ def crawl_calls_level( continue parameters = [] - logger.info(f"Call: {call}") + logger.info(f"Call: {json.dumps(call, indent=4)}") for input in call["inputs"]: @@ -170,16 +161,13 @@ def crawl_calls_level( if ( contracts_ABIs[call["address"]][input["value"]]["name"] == "totalSupply" - ): + ): # hack for totalSupply TODO(Andrey): need add propper support for response parsing print(responces[input["value"]][0]) parameters.append( list(range(1, responces[input["value"]][0][0] + 1)) ) - # parameters.append(list(range(40000, 46000))) else: parameters.append(responces[input["value"]]) - # if call["name"] == "getStats": - # pprint(responces[input["value"]]) elif type(input["value"]) == list: parameters.append(input["value"]) else: @@ -202,7 +190,6 @@ def crawl_calls_level( ) retry = 0 - print(dir(web3_client)) while len(calls_of_level) > 0: @@ -214,6 +201,8 @@ def crawl_calls_level( logger.info( f"Calling multicall2 with {len(call_chunk)} calls at block {block_number}" ) + + # 1 thead with timeout for hung multicall calls with ThreadPoolExecutor(max_workers=1) as executor: future = executor.submit( make_multicall, @@ -230,7 +219,7 @@ def crawl_calls_level( calls_of_level = calls_of_level[batch_size:] logger.info(f"lenght of task left {len(calls_of_level)}.") batch_size = min(batch_size * 2, max_batch_size) - except ValueError as e: + except ValueError as e: # missing trie node logger.info(f"ValueError: {e}, retrying") retry += 1 if "missing trie node" in str(e): @@ -239,7 +228,7 @@ def crawl_calls_level( if retry > 5: raise (e) batch_size = max(batch_size // 3, min_batch_size) - except TimeoutError as e: + except TimeoutError as e: # timeout logger.info(f"TimeoutError: {e}, retrying") retry += 1 if retry > 5: @@ -270,6 +259,7 @@ def crawl_calls_level( def parse_jobs( jobs: List[Any], blockchain_type: AvailableBlockchainType, + web3_provider_uri: Optional[str], block_number: Optional[int], batch_size: int, access_id: UUID, @@ -282,9 +272,26 @@ def parse_jobs( contracts_methods: Dict[str, Any] = {} calls: Dict[int, Any] = {0: []} - web3_client = _retry_connect_web3( - blockchain_type=blockchain_type, access_id=access_id - ) + if web3_provider_uri is None: + + logger.info(f"Connecting to blockchain: {blockchain_type} with Node balancer.") + web3_client 
= _retry_connect_web3( + blockchain_type=blockchain_type, access_id=access_id + ) + else: + try: + logger.info( + f"Connecting to blockchain: {blockchain_type} with custom provider!" + ) + web3_client = Web3(HTTPProvider(web3_provider_uri)) + + if blockchain_type != AvailableBlockchainType.ETHEREUM: + web3_client.middleware_onion.inject(geth_poa_middleware, layer=0) + except Exception as e: + logger.info( + f"Web3 connection to custom provider {web3_provider_uri} failed error: {e}" + ) + raise (e) logger.info(f"Crawler started connected to blockchain: {blockchain_type}") @@ -364,8 +371,6 @@ def parse_jobs( recursive_unpack(job, 0) - pprint(calls) - # generate contracts interfaces interfaces = {} @@ -449,463 +454,18 @@ def handle_crawl(args: argparse.Namespace) -> None: Read all view methods of the contracts and crawl """ - my_jobs = [ - { - "type": "function", - "stateMutability": "view", - "inputs": [ - { - "internalType": "uint256", - "name": "tokenId", - "type": "uint256", - "value": { - "type": "function", - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256", - } - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - "inputs": [], - }, - } - ], - "name": "tokenURI", - "outputs": [{"internalType": "string", "name": "", "type": "string"}], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - { - "type": "function", - "stateMutability": "view", - "inputs": [ - { - "internalType": "uint256", - "name": "tokenId", - "type": "uint256", - "value": { - "type": "function", - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256", - } - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - "inputs": [], - }, - } - ], - "name": "getDNA", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - # { - # "inputs": [ - # { - # "internalType": "uint256", - # "name": "_dna", - # "type": "uint256", - # "value": { - # "type": "function", - # "stateMutability": "view", - # "inputs": [ - # { - # "internalType": "uint256", - # "name": "tokenId", - # "type": "uint256", - # "value": { - # "type": "function", - # "name": "totalSupply", - # "outputs": [ - # { - # "internalType": "uint256", - # "name": "", - # "type": "uint256", - # } - # ], - # "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - # "inputs": [], - # }, - # } - # ], - # "name": "getDNA", - # "outputs": [ - # {"internalType": "uint256", "name": "", "type": "uint256"} - # ], - # "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - # }, - # } - # ], - # "name": "getStats", - # "outputs": [ - # {"internalType": "uint256", "name": "attack", "type": "uint256"}, - # {"internalType": "uint256", "name": "accuracy", "type": "uint256"}, - # {"internalType": "uint256", "name": "movementSpeed", "type": "uint256"}, - # {"internalType": "uint256", "name": "attackSpeed", "type": "uint256"}, - # {"internalType": "uint256", "name": "defense", "type": "uint256"}, - # {"internalType": "uint256", "name": "vitality", "type": "uint256"}, - # {"internalType": "uint256", "name": "resistance", "type": "uint256"}, - # {"internalType": "uint256", "name": "magic", "type": "uint256"}, - # ], - # "stateMutability": "view", - # "type": "function", - # "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - # }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_dna", - "type": "uint256", - "value": { - "type": "function", - 
"stateMutability": "view", - "inputs": [ - { - "internalType": "uint256", - "name": "tokenId", - "type": "uint256", - "value": { - "type": "function", - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256", - } - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - "inputs": [], - }, - } - ], - "name": "getDNA", - "outputs": [ - {"internalType": "uint256", "name": "", "type": "uint256"} - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - } - ], - "name": "getAttack", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_dna", - "type": "uint256", - "value": { - "type": "function", - "stateMutability": "view", - "inputs": [ - { - "internalType": "uint256", - "name": "tokenId", - "type": "uint256", - "value": { - "type": "function", - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256", - } - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - "inputs": [], - }, - } - ], - "name": "getDNA", - "outputs": [ - {"internalType": "uint256", "name": "", "type": "uint256"} - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - } - ], - "name": "getAccuracy", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_dna", - "type": "uint256", - "value": { - "type": "function", - "stateMutability": "view", - "inputs": [ - { - "internalType": "uint256", - "name": "tokenId", - "type": "uint256", - "value": { - "type": "function", - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256", - } - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - "inputs": [], - }, - } - ], - "name": "getDNA", - "outputs": [ - {"internalType": "uint256", "name": "", "type": "uint256"} - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - } - ], - "name": "getMovementSpeed", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_dna", - "type": "uint256", - "value": { - "type": "function", - "stateMutability": "view", - "inputs": [ - { - "internalType": "uint256", - "name": "tokenId", - "type": "uint256", - "value": { - "type": "function", - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256", - } - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - "inputs": [], - }, - } - ], - "name": "getDNA", - "outputs": [ - {"internalType": "uint256", "name": "", "type": "uint256"} - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - } - ], - "name": "getAttackSpeed", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_dna", - "type": "uint256", - "value": { - "type": "function", - "stateMutability": "view", - "inputs": [ - { - 
"internalType": "uint256", - "name": "tokenId", - "type": "uint256", - "value": { - "type": "function", - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256", - } - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - "inputs": [], - }, - } - ], - "name": "getDNA", - "outputs": [ - {"internalType": "uint256", "name": "", "type": "uint256"} - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - } - ], - "name": "getDefense", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_dna", - "type": "uint256", - "value": { - "type": "function", - "stateMutability": "view", - "inputs": [ - { - "internalType": "uint256", - "name": "tokenId", - "type": "uint256", - "value": { - "type": "function", - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256", - } - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - "inputs": [], - }, - } - ], - "name": "getDNA", - "outputs": [ - {"internalType": "uint256", "name": "", "type": "uint256"} - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - } - ], - "name": "getVitality", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_dna", - "type": "uint256", - "value": { - "type": "function", - "stateMutability": "view", - "inputs": [ - { - "internalType": "uint256", - "name": "tokenId", - "type": "uint256", - "value": { - "type": "function", - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256", - } - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - "inputs": [], - }, - } - ], - "name": "getDNA", - "outputs": [ - {"internalType": "uint256", "name": "", "type": "uint256"} - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - } - ], - "name": "getResistance", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - { - "inputs": [ - { - "internalType": "uint256", - "name": "_dna", - "type": "uint256", - "value": { - "type": "function", - "stateMutability": "view", - "inputs": [ - { - "internalType": "uint256", - "name": "tokenId", - "type": "uint256", - "value": { - "type": "function", - "name": "totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256", - } - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - "inputs": [], - }, - } - ], - "name": "getDNA", - "outputs": [ - {"internalType": "uint256", "name": "", "type": "uint256"} - ], - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - } - ], - "name": "getMagic", - "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], - "stateMutability": "view", - "type": "function", - "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", - }, - ] + with open(args.jobs_file, "r") as f: + jobs = json.load(f) blockchain_type = AvailableBlockchainType(args.blockchain) parse_jobs( - my_jobs, blockchain_type, args.block_number, args.batch_size, args.access_id + jobs, + 
blockchain_type, + args.custom_web3_provider, + args.block_number, + args.batch_size, + args.access_id, ) @@ -915,12 +475,12 @@ def parse_abi(args: argparse.Namespace) -> None: """ with open(args.abi_file, "r") as f: - # read json and parse only stateMutability equal to view abi = json.load(f) output_json = [] for method in abi: + # read json and parse only stateMutability equal to view if method.get("stateMutability") and method["stateMutability"] == "view": output_json.append(method) @@ -979,9 +539,22 @@ def main() -> None: help="Type of blovkchain wich writng in database", required=True, ) + view_state_crawler_parser.add_argument( + "--custom-web3-provider", + "-w3", + type=str, + help="Type of blovkchain wich writng in database", + ) view_state_crawler_parser.add_argument( "--block-number", "-N", type=str, help="Block number." ) + view_state_crawler_parser.add_argument( + "--jobs-file", + "-j", + type=str, + help="Path to json file with jobs", + required=True, + ) view_state_crawler_parser.add_argument( "--batch-size", "-s", @@ -1030,13 +603,3 @@ def main() -> None: if __name__ == "__main__": main() - - -( - False, - b"\x08\xc3y\xa0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 Date: Thu, 15 Dec 2022 15:48:05 +0200 Subject: [PATCH 07/13] Add fixes. --- backend/moonstreamapi/api.py | 1 - 1 file changed, 1 deletion(-) diff --git a/backend/moonstreamapi/api.py b/backend/moonstreamapi/api.py index d165c416..0ba1fcee 100644 --- a/backend/moonstreamapi/api.py +++ b/backend/moonstreamapi/api.py @@ -33,7 +33,6 @@ tags_metadata = [ }, {"name": "dashboards", "description": "Operations with user dashboards."}, {"name": "queries", "description": "Operations with user queries."}, - {"name": "public", "description": "Operations with public endpoints."}, {"name": "streams", "description": "Operations with data streams and filters."}, {"name": "subscriptions", "description": "Operations with user subscriptions."}, {"name": "time", "description": "Server timestamp endpoints."}, From f68ab988c9aed78bd2a505d936caa2bf03cae4ea Mon Sep 17 00:00:00 2001 From: Andrey Date: Thu, 15 Dec 2022 16:18:44 +0200 Subject: [PATCH 08/13] Add jobs folder. 
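These files are plain JSON arrays of the view-call jobs described in the
earlier patches; the crawler loads them verbatim. A short sketch of how a jobs
file is consumed (load_jobs is an illustrative helper; the CLI itself simply
open()s and json.load()s the --jobs-file argument):

    import json

    def load_jobs(path: str) -> list:
        # A jobs file is a JSON array of view-call job definitions.
        with open(path, "r") as f:
            return json.load(f)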
--- .../state_crawler/jobs/mumbai-jobs.json | 35 +++++++++++++++++++ .../state_crawler/jobs/polygon-jobs.json | 35 +++++++++++++++++++ 2 files changed, 70 insertions(+) create mode 100644 crawlers/mooncrawl/mooncrawl/state_crawler/jobs/mumbai-jobs.json create mode 100644 crawlers/mooncrawl/mooncrawl/state_crawler/jobs/polygon-jobs.json diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/jobs/mumbai-jobs.json b/crawlers/mooncrawl/mooncrawl/state_crawler/jobs/mumbai-jobs.json new file mode 100644 index 00000000..661a6833 --- /dev/null +++ b/crawlers/mooncrawl/mooncrawl/state_crawler/jobs/mumbai-jobs.json @@ -0,0 +1,35 @@ +[ + { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "address": "0x39858b1A4e48CfFB1019F0A15ff54899213B3f8b", + "inputs": [] + } + } + ], + "name": "tokenURI", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "address": "0x39858b1A4e48CfFB1019F0A15ff54899213B3f8b" + } +] \ No newline at end of file diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/jobs/polygon-jobs.json b/crawlers/mooncrawl/mooncrawl/state_crawler/jobs/polygon-jobs.json new file mode 100644 index 00000000..4df3a5e8 --- /dev/null +++ b/crawlers/mooncrawl/mooncrawl/state_crawler/jobs/polygon-jobs.json @@ -0,0 +1,35 @@ +[ + { + "type": "function", + "stateMutability": "view", + "inputs": [ + { + "internalType": "uint256", + "name": "tokenId", + "type": "uint256", + "value": { + "type": "function", + "name": "totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f", + "inputs": [] + } + } + ], + "name": "tokenURI", + "outputs": [ + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "address": "0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f" + } +] \ No newline at end of file From e64bb7fcad5052b5492b5184b5f21328df3e1d18 Mon Sep 17 00:00:00 2001 From: Andrey Date: Thu, 15 Dec 2022 17:27:51 +0200 Subject: [PATCH 09/13] Add deploy services and fix if condition. 
--- crawlers/deploy/mumbai-state.service | 13 +++++++ crawlers/deploy/mumbai-state.timer | 9 +++++ crawlers/deploy/polygon-state.service | 2 +- crawlers/mooncrawl/mooncrawl/settings.py | 21 +++++++++++ .../mooncrawl/mooncrawl/state_crawler/cli.py | 37 ++++++++++++++----- crawlers/mooncrawl/sample.env | 3 +- 6 files changed, 73 insertions(+), 12 deletions(-) create mode 100644 crawlers/deploy/mumbai-state.service create mode 100644 crawlers/deploy/mumbai-state.timer diff --git a/crawlers/deploy/mumbai-state.service b/crawlers/deploy/mumbai-state.service new file mode 100644 index 00000000..8b11512b --- /dev/null +++ b/crawlers/deploy/mumbai-state.service @@ -0,0 +1,13 @@ +[Unit] +Description=Execute state crawler +After=network.target + +[Service] +Type=oneshot +User=ubuntu +Group=www-data +WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl +EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env +ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" crawl-jobs --blockchain mumbai --infura --jobs-file ./mooncrawl/state_crawler/jobs/mumbai-jobs.json +CPUWeight=60 +SyslogIdentifier=polygon-state diff --git a/crawlers/deploy/mumbai-state.timer b/crawlers/deploy/mumbai-state.timer new file mode 100644 index 00000000..76ea17be --- /dev/null +++ b/crawlers/deploy/mumbai-state.timer @@ -0,0 +1,9 @@ +[Unit] +Description=Execute Polygon state crawler each 10m + +[Timer] +OnBootSec=15s +OnUnitActiveSec=10m + +[Install] +WantedBy=timers.target diff --git a/crawlers/deploy/polygon-state.service b/crawlers/deploy/polygon-state.service index 46ddd88e..2bdcbadc 100644 --- a/crawlers/deploy/polygon-state.service +++ b/crawlers/deploy/polygon-state.service @@ -8,6 +8,6 @@ User=ubuntu Group=www-data WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env -ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" crawl-jobs --blockchain polygon +ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" crawl-jobs --blockchain polygon --infura --jobs-file ./mooncrawl/state_crawler/jobs/polygon-jobs.json CPUWeight=60 SyslogIdentifier=polygon-state diff --git a/crawlers/mooncrawl/mooncrawl/settings.py b/crawlers/mooncrawl/mooncrawl/settings.py index 66e50dd6..11e654c5 100644 --- a/crawlers/mooncrawl/mooncrawl/settings.py +++ b/crawlers/mooncrawl/mooncrawl/settings.py @@ -210,3 +210,24 @@ if MOONSTREAM_S3_PUBLIC_DATA_BUCKET == "": MOONSTREAM_S3_PUBLIC_DATA_BUCKET_PREFIX = os.environ.get( "MOONSTREAM_S3_PUBLIC_DATA_BUCKET_PREFIX", "dev" ) + + +# infura config + + +INFURA_PROJECT_ID = os.environ.get("INFURA_PROJECT_ID") + +infura_networks = { + AvailableBlockchainType.ETHEREUM: { + "name": "mainnet", + "url": f"https://mainnet.infura.io/v3/{INFURA_PROJECT_ID}", + }, + AvailableBlockchainType.POLYGON: { + "name": "polygon", + "url": f"https://polygon-mainnet.infura.io/v3/{INFURA_PROJECT_ID}", + }, + AvailableBlockchainType.MUMBAI: { + "name": "mumbai", + "url": f"https://polygon-mumbai.infura.io/v3/{INFURA_PROJECT_ID}", + }, +} diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py index c79704b0..d4a9ce03 100644 --- a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py +++ b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py @@ -28,9 +28,11 @@ from .Multicall2_interface import Contract as Multicall2 
from ..settings import ( NB_CONTROLLER_ACCESS_ID, MOONSTREAM_STATE_CRAWLER_DB_STATEMENT_TIMEOUT_MILLIS, + INFURA_PROJECT_ID, multicall_contracts, + infura_networks, ) -from .web3_util import FunctionSignature +from .web3_util import FunctionSignature, connect logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) @@ -272,18 +274,13 @@ def parse_jobs( contracts_methods: Dict[str, Any] = {} calls: Dict[int, Any] = {0: []} - if web3_provider_uri is None: - - logger.info(f"Connecting to blockchain: {blockchain_type} with Node balancer.") - web3_client = _retry_connect_web3( - blockchain_type=blockchain_type, access_id=access_id - ) - else: + if web3_provider_uri is not None: try: logger.info( f"Connecting to blockchain: {blockchain_type} with custom provider!" ) - web3_client = Web3(HTTPProvider(web3_provider_uri)) + + web3_client = connect(web3_provider_uri) if blockchain_type != AvailableBlockchainType.ETHEREUM: web3_client.middleware_onion.inject(geth_poa_middleware, layer=0) @@ -292,6 +289,11 @@ def parse_jobs( f"Web3 connection to custom provider {web3_provider_uri} failed error: {e}" ) raise (e) + else: + logger.info(f"Connecting to blockchain: {blockchain_type} with Node balancer.") + web3_client = _retry_connect_web3( + blockchain_type=blockchain_type, access_id=access_id + ) logger.info(f"Crawler started connected to blockchain: {blockchain_type}") @@ -459,10 +461,20 @@ def handle_crawl(args: argparse.Namespace) -> None: blockchain_type = AvailableBlockchainType(args.blockchain) + custom_web3_provider = args.custom_web3_provider + + if args.infura and INFURA_PROJECT_ID is not None: + if blockchain_type not in infura_networks: + raise ValueError( + f"Infura is not supported for {blockchain_type} blockchain type" + ) + logger.info(f"Using Infura!") + custom_web3_provider = infura_networks[blockchain_type]["url"] + parse_jobs( jobs, blockchain_type, - args.custom_web3_provider, + custom_web3_provider, args.block_number, args.batch_size, args.access_id, @@ -539,6 +551,11 @@ def main() -> None: help="Type of blovkchain wich writng in database", required=True, ) + view_state_crawler_parser.add_argument( + "--infura", + action="store_true", + help="Use infura as web3 provider", + ) view_state_crawler_parser.add_argument( "--custom-web3-provider", "-w3", diff --git a/crawlers/mooncrawl/sample.env b/crawlers/mooncrawl/sample.env index 5e7e8980..74ab5f8f 100644 --- a/crawlers/mooncrawl/sample.env +++ b/crawlers/mooncrawl/sample.env @@ -36,4 +36,5 @@ export COINMARKETCAP_API_KEY="" # Custom crawler export MOONSTREAM_S3_PUBLIC_DATA_BUCKET="" export MOONSTREAM_S3_PUBLIC_DATA_BUCKET_PREFIX="dev" -export MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN="" \ No newline at end of file +export MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN="" +export INFURA_PROJECT_ID="" \ No newline at end of file From 9cfd12dc8dc6b8040f4c9cf1f17e1d537f282964 Mon Sep 17 00:00:00 2001 From: Andrey Date: Thu, 15 Dec 2022 17:34:49 +0200 Subject: [PATCH 10/13] Add state cleaner. 

From 9cfd12dc8dc6b8040f4c9cf1f17e1d537f282964 Mon Sep 17 00:00:00 2001
From: Andrey
Date: Thu, 15 Dec 2022 17:34:49 +0200
Subject: [PATCH 10/13] Add state cleaner.

---
 crawlers/deploy/mumbai-state-clean.service | 13 +++++++++++++
 crawlers/deploy/mumbai-state-clean.timer   |  9 +++++++++
 2 files changed, 22 insertions(+)
 create mode 100644 crawlers/deploy/mumbai-state-clean.service
 create mode 100644 crawlers/deploy/mumbai-state-clean.timer

diff --git a/crawlers/deploy/mumbai-state-clean.service b/crawlers/deploy/mumbai-state-clean.service
new file mode 100644
index 00000000..c909d5cf
--- /dev/null
+++ b/crawlers/deploy/mumbai-state-clean.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Execute state clean labels crawler
+After=network.target
+
+[Service]
+Type=oneshot
+User=ubuntu
+Group=www-data
+WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
+EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
+ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" clean-state-labels --blockchain mumbai -N 10000
+CPUWeight=60
+SyslogIdentifier=polygon-state-clean
diff --git a/crawlers/deploy/mumbai-state-clean.timer b/crawlers/deploy/mumbai-state-clean.timer
new file mode 100644
index 00000000..cfb37c0c
--- /dev/null
+++ b/crawlers/deploy/mumbai-state-clean.timer
@@ -0,0 +1,9 @@
+[Unit]
+Description=Execute Polygon state clean labels crawler each 25m
+
+[Timer]
+OnBootSec=50s
+OnUnitActiveSec=25m
+
+[Install]
+WantedBy=timers.target

From b16a4ff1a13f40ba0fd8e10ee39da4da480c94aa Mon Sep 17 00:00:00 2001
From: Andrey
Date: Thu, 15 Dec 2022 19:31:24 +0200
Subject: [PATCH 11/13] Add fixes.

---
 crawlers/deploy/mumbai-state.service              |  2 +-
 crawlers/deploy/polygon-state.service             |  2 +-
 crawlers/mooncrawl/mooncrawl/state_crawler/cli.py | 12 ++++--------
 3 files changed, 6 insertions(+), 10 deletions(-)

diff --git a/crawlers/deploy/mumbai-state.service b/crawlers/deploy/mumbai-state.service
index 8b11512b..5ef365ed 100644
--- a/crawlers/deploy/mumbai-state.service
+++ b/crawlers/deploy/mumbai-state.service
@@ -8,6 +8,6 @@ User=ubuntu
 Group=www-data
 WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
 EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
-ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" crawl-jobs --blockchain mumbai --infura --jobs-file ./mooncrawl/state_crawler/jobs/mumbai-jobs.json
+ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" crawl-jobs --blockchain mumbai --infura --jobs-file /home/ubuntu/moonstream/crawlers/mooncrawl/mooncrawl/state_crawler/jobs/mumbai-jobs.json
 CPUWeight=60
 SyslogIdentifier=polygon-state
diff --git a/crawlers/deploy/polygon-state.service b/crawlers/deploy/polygon-state.service
index 2bdcbadc..1effefba 100644
--- a/crawlers/deploy/polygon-state.service
+++ b/crawlers/deploy/polygon-state.service
@@ -8,6 +8,6 @@ User=ubuntu
 Group=www-data
 WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
 EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
-ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" crawl-jobs --blockchain polygon --infura --jobs-file ./mooncrawl/state_crawler/jobs/polygon-jobs.json
+ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" crawl-jobs --blockchain polygon --infura --jobs-file /home/ubuntu/moonstream/crawlers/mooncrawl/mooncrawl/state_crawler/jobs/polygon-jobs.json
 CPUWeight=60
 SyslogIdentifier=polygon-state
diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py
index d4a9ce03..a464ffe2 100644
--- a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py
+++ b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py
@@ -47,9 +47,6 @@ def make_multicall(
 
     multicall_calls = []
 
-    # Remove!
-    logger.info(f"multicall for methods {set([call['method'].name for call in calls])}")
-
     for call in calls:
         try:
             multicall_calls.append(
@@ -222,22 +219,21 @@ def crawl_calls_level(
                 logger.info(f"lenght of task left {len(calls_of_level)}.")
                 batch_size = min(batch_size * 2, max_batch_size)
             except ValueError as e: # missing trie node
-                logger.info(f"ValueError: {e}, retrying")
+                logger.error(f"ValueError: {e}, retrying")
                 retry += 1
                 if "missing trie node" in str(e):
-                    cache_session(web3_client.HTTPProvider.endpoint_uri, requests.Session())
                     time.sleep(4)
                 if retry > 5:
                     raise (e)
                 batch_size = max(batch_size // 3, min_batch_size)
             except TimeoutError as e: # timeout
-                logger.info(f"TimeoutError: {e}, retrying")
+                logger.error(f"TimeoutError: {e}, retrying")
                 retry += 1
                 if retry > 5:
                     raise (e)
                 batch_size = max(batch_size // 3, min_batch_size)
             except Exception as e:
-                logger.info(f"Exception: {e}")
+                logger.error(f"Exception: {e}")
                 raise (e)
             time.sleep(2)
             print(f"retry: {retry}")
@@ -285,7 +281,7 @@ def parse_jobs(
             if blockchain_type != AvailableBlockchainType.ETHEREUM:
                 web3_client.middleware_onion.inject(geth_poa_middleware, layer=0)
         except Exception as e:
-            logger.info(
+            logger.error(
                 f"Web3 connection to custom provider {web3_provider_uri} failed error: {e}"
             )
             raise (e)
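The retry loop reworked in the hunk above follows an adaptive-batching pattern: log the failure at error level, back off, shrink the multicall batch on RPC errors, and grow it back after a success. A self-contained sketch of that pattern (names and defaults are illustrative; the real crawl_calls_level interleaves this with result parsing and database writes):

    import logging
    import time

    logger = logging.getLogger(__name__)

    def call_with_adaptive_batch(do_call, calls, batch_size, min_batch_size=50, max_batch_size=1000, max_retries=5):
        """Illustrative helper: retry a batched RPC call, shrinking the batch on errors."""
        retry = 0
        while True:
            try:
                result = do_call(calls[:batch_size])
                # Success: cautiously grow the batch back toward the maximum.
                return result, min(batch_size * 2, max_batch_size)
            except (ValueError, TimeoutError) as err:
                retry += 1
                logger.error("Batched call failed (%s), retry %d", err, retry)
                if retry > max_retries:
                    raise
                # Failure: shrink the batch and wait before trying again.
                batch_size = max(batch_size // 3, min_batch_size)
                time.sleep(2)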

From c7cfe7a160a7ecaa9d89b5b0321348a994aafeda Mon Sep 17 00:00:00 2001
From: Andrey
Date: Thu, 15 Dec 2022 19:43:43 +0200
Subject: [PATCH 12/13] Fix service and timer to mumbai name.

---
 crawlers/deploy/mumbai-state-clean.service | 2 +-
 crawlers/deploy/mumbai-state-clean.timer   | 2 +-
 crawlers/deploy/mumbai-state.service       | 2 +-
 crawlers/deploy/mumbai-state.timer         | 2 +-
 4 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/crawlers/deploy/mumbai-state-clean.service b/crawlers/deploy/mumbai-state-clean.service
index c909d5cf..84fd553d 100644
--- a/crawlers/deploy/mumbai-state-clean.service
+++ b/crawlers/deploy/mumbai-state-clean.service
@@ -10,4 +10,4 @@ WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
 EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
 ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" clean-state-labels --blockchain mumbai -N 10000
 CPUWeight=60
-SyslogIdentifier=polygon-state-clean
+SyslogIdentifier=mumbai-state-clean
diff --git a/crawlers/deploy/mumbai-state-clean.timer b/crawlers/deploy/mumbai-state-clean.timer
index cfb37c0c..e29cd6a8 100644
--- a/crawlers/deploy/mumbai-state-clean.timer
+++ b/crawlers/deploy/mumbai-state-clean.timer
@@ -1,5 +1,5 @@
 [Unit]
-Description=Execute Polygon state clean labels crawler each 25m
+Description=Execute Mumbai state clean labels crawler each 25m
 
 [Timer]
 OnBootSec=50s
diff --git a/crawlers/deploy/mumbai-state.service b/crawlers/deploy/mumbai-state.service
index 5ef365ed..8493b385 100644
--- a/crawlers/deploy/mumbai-state.service
+++ b/crawlers/deploy/mumbai-state.service
@@ -10,4 +10,4 @@ WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
 EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
 ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" crawl-jobs --blockchain mumbai --infura --jobs-file /home/ubuntu/moonstream/crawlers/mooncrawl/mooncrawl/state_crawler/jobs/mumbai-jobs.json
 CPUWeight=60
-SyslogIdentifier=polygon-state
+SyslogIdentifier=mumbai-state
diff --git a/crawlers/deploy/mumbai-state.timer b/crawlers/deploy/mumbai-state.timer
index 76ea17be..48e17e68 100644
--- a/crawlers/deploy/mumbai-state.timer
+++ b/crawlers/deploy/mumbai-state.timer
@@ -1,5 +1,5 @@
 [Unit]
-Description=Execute Polygon state crawler each 10m
+Description=Execute Mumbai state crawler each 10m
 
 [Timer]
 OnBootSec=15s

From 338b321b1f5b375a0d454ed2a9dd7eaa9ca5db7c Mon Sep 17 00:00:00 2001
From: Andrey
Date: Thu, 15 Dec 2022 20:09:27 +0200
Subject: [PATCH 13/13] Add deploy changes

---
 crawlers/deploy/deploy.bash | 24 ++++++++++++++++++++++++
 1 file changed, 24 insertions(+)

diff --git a/crawlers/deploy/deploy.bash b/crawlers/deploy/deploy.bash
index f8b565e4..9fd5bba7 100755
--- a/crawlers/deploy/deploy.bash
+++ b/crawlers/deploy/deploy.bash
@@ -58,6 +58,10 @@ MUMBAI_SYNCHRONIZE_SERVICE="mumbai-synchronize.service"
 MUMBAI_MISSING_SERVICE_FILE="mumbai-missing.service"
 MUMBAI_MISSING_TIMER_FILE="mumbai-missing.timer"
 MUMBAI_MOONWORM_CRAWLER_SERVICE_FILE="mumbai-moonworm-crawler.service"
+MUMBAI_STATE_SERVICE_FILE="mumbai-state.service"
+MUMBAI_STATE_TIMER_FILE="mumbai-state.timer"
+MUMBAI_STATE_CLEAN_SERVICE_FILE="mumbai-state-clean.service"
+MUMBAI_STATE_CLEAN_TIMER_FILE="mumbai-state-clean.timer"
 
 # XDai service files
 XDAI_SYNCHRONIZE_SERVICE="xdai-synchronize.service"
@@ -289,3 +293,23 @@ cp "${SCRIPT_DIR}/${POLYGON_CU_REPORTS_TOKENONOMICS_SERVICE_FILE}" "/etc/systemd
 cp "${SCRIPT_DIR}/${POLYGON_CU_REPORTS_TOKENONOMICS_TIMER_FILE}" "/etc/systemd/system/${POLYGON_CU_REPORTS_TOKENONOMICS_TIMER_FILE}"
 systemctl daemon-reload
 systemctl restart --no-block "${POLYGON_CU_REPORTS_TOKENONOMICS_TIMER_FILE}"
+
+
+
+echo
+echo
+echo -e "${PREFIX_INFO} Replacing existing MUMBAI state service and timer with: ${MUMBAI_STATE_SERVICE_FILE}, ${MUMBAI_STATE_TIMER_FILE}"
+chmod 644 "${SCRIPT_DIR}/${MUMBAI_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${MUMBAI_STATE_TIMER_FILE}"
+cp "${SCRIPT_DIR}/${MUMBAI_STATE_SERVICE_FILE}" "/etc/systemd/system/${MUMBAI_STATE_SERVICE_FILE}"
+cp "${SCRIPT_DIR}/${MUMBAI_STATE_TIMER_FILE}" "/etc/systemd/system/${MUMBAI_STATE_TIMER_FILE}"
+systemctl daemon-reload
+systemctl restart --no-block "${MUMBAI_STATE_TIMER_FILE}"
+
+echo
+echo
+echo -e "${PREFIX_INFO} Replacing existing MUMBAI state clean service and timer with: ${MUMBAI_STATE_CLEAN_SERVICE_FILE}, ${MUMBAI_STATE_CLEAN_TIMER_FILE}"
+chmod 644 "${SCRIPT_DIR}/${MUMBAI_STATE_CLEAN_SERVICE_FILE}" "${SCRIPT_DIR}/${MUMBAI_STATE_CLEAN_TIMER_FILE}"
+cp "${SCRIPT_DIR}/${MUMBAI_STATE_CLEAN_SERVICE_FILE}" "/etc/systemd/system/${MUMBAI_STATE_CLEAN_SERVICE_FILE}"
+cp "${SCRIPT_DIR}/${MUMBAI_STATE_CLEAN_TIMER_FILE}" "/etc/systemd/system/${MUMBAI_STATE_CLEAN_TIMER_FILE}"
+systemctl daemon-reload
+systemctl restart --no-block "${MUMBAI_STATE_CLEAN_TIMER_FILE}"
\ No newline at end of file