Revert "checking function abi before creating job"

pull/584/head
Neeraj Kashyap 2022-04-14 16:00:06 -07:00 committed by GitHub
parent 25ffedfe07
commit 84cbfcc38a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 9 additions and 21 deletions

View File

@@ -11,7 +11,6 @@ from ..settings import MOONSTREAM_MOONWORM_TASKS_JOURNAL, bugout_client
from .continuous_crawler import _retry_connect_web3, continuous_crawler
from .crawler import (
SubscriptionTypes,
blockchain_type_to_subscription_type,
get_crawl_job_entries,
make_event_crawl_jobs,
make_function_call_crawl_jobs,
@@ -24,13 +23,9 @@ logger = logging.getLogger(__name__)
def handle_crawl(args: argparse.Namespace) -> None:
blockchain_type = AvailableBlockchainType(args.blockchain_type)
logger.info(f"Blockchain type: {blockchain_type.value}")
initial_event_jobs = make_event_crawl_jobs(
get_crawl_job_entries(
blockchain_type_to_subscription_type(blockchain_type),
SubscriptionTypes.POLYGON_BLOCKCHAIN,
"event",
MOONSTREAM_MOONWORM_TASKS_JOURNAL,
)
@@ -39,7 +34,7 @@ def handle_crawl(args: argparse.Namespace) -> None:
initial_function_call_jobs = make_function_call_crawl_jobs(
get_crawl_job_entries(
blockchain_type_to_subscription_type(blockchain_type),
SubscriptionTypes.POLYGON_BLOCKCHAIN,
"function",
MOONSTREAM_MOONWORM_TASKS_JOURNAL,
)
@@ -47,6 +42,12 @@ def handle_crawl(args: argparse.Namespace) -> None:
logger.info(
f"Initial function call crawl jobs count: {len(initial_function_call_jobs)}"
)
# Couldn't figure out how to convert from string to AvailableBlockchainType
# AvailableBlockchainType(args.blockchain_type) is not working
blockchain_type = AvailableBlockchainType(args.blockchain_type)
logger.info(f"Blockchain type: {blockchain_type.value}")
with yield_db_session_ctx() as db_session:
web3: Optional[Web3] = None
if args.web3 is None:
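For context on the hunks in this first file: the reverted change resolved the subscription type from the CLI blockchain argument, while the revert goes back to the hardcoded Polygon constant. A minimal sketch of the two call shapes, assuming only the names visible in the hunks above (get_crawl_job_entries, SubscriptionTypes, blockchain_type_to_subscription_type):

# Reverted behaviour: subscription type derived from the --blockchain-type argument
entries = get_crawl_job_entries(
    blockchain_type_to_subscription_type(blockchain_type),
    "event",
    MOONSTREAM_MOONWORM_TASKS_JOURNAL,
)

# Behaviour after this revert: Polygon subscription type hardcoded
entries = get_crawl_job_entries(
    SubscriptionTypes.POLYGON_BLOCKCHAIN,
    "event",
    MOONSTREAM_MOONWORM_TASKS_JOURNAL,
)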

View File

@@ -154,14 +154,12 @@ def get_crawl_job_entries(
offset=current_offset,
limit=limit,
)
entries.extend(search_result.results)
# if len(entries) >= search_result.total_results:
if len(search_result.results) == 0:
break
current_offset += limit
return entries
@@ -219,15 +217,7 @@ def make_function_call_crawl_jobs(
created_at=int(datetime.fromisoformat(entry.created_at).timestamp()),
)
else:
old_selectors = [
encode_function_signature(function_abi)
for function_abi in crawl_job_by_address[contract_address].contract_abi
]
new_selector = encode_function_signature(json.loads(abi))
if new_selector not in old_selectors:
crawl_job_by_address[contract_address].contract_abi.append(
json.loads(abi)
)
crawl_job_by_address[contract_address].contract_abi.append(json.loads(abi))
return [crawl_job for crawl_job in crawl_job_by_address.values()]
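
The hunk above removes the selector check introduced by the reverted commit: before the revert, an ABI entry was appended to a contract's crawl job only if its function selector was not already present; after the revert it is appended unconditionally. A rough sketch of the two behaviours, assuming encode_function_signature and the crawl_job_by_address mapping shown in the hunk:

# Reverted (pre-revert) logic: skip ABIs whose selector is already tracked for this contract
old_selectors = [
    encode_function_signature(function_abi)
    for function_abi in crawl_job_by_address[contract_address].contract_abi
]
new_selector = encode_function_signature(json.loads(abi))
if new_selector not in old_selectors:
    crawl_job_by_address[contract_address].contract_abi.append(json.loads(abi))

# Logic restored by this revert: always append, even if the selector is a duplicate
crawl_job_by_address[contract_address].contract_abi.append(json.loads(abi))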
@@ -293,9 +283,6 @@ def merge_function_call_crawl_jobs(
)
break
else:
# No old job with new job address was found
# This else is intended for `for`
# https://book.pythontips.com/en/latest/for_-_else.html
old_crawl_jobs.append(new_crawl_job)
return old_crawl_jobs
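
The comments removed in the last hunk refer to Python's for-else construct (https://book.pythontips.com/en/latest/for_-_else.html): the else branch runs only when the loop finishes without hitting break. A small self-contained illustration with hypothetical job dictionaries, not taken from the diff:

# for-else: the else clause runs only if the for loop was not exited via break
old_crawl_jobs = [{"contract_address": "0xaaa"}, {"contract_address": "0xbbb"}]
new_crawl_job = {"contract_address": "0xccc"}

for old_job in old_crawl_jobs:
    if old_job["contract_address"] == new_crawl_job["contract_address"]:
        # an existing job matches the new job's address; merge and stop scanning
        break
else:
    # no old job with the new job's address was found, so keep it as a new job
    old_crawl_jobs.append(new_crawl_job)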