Merge branch 'main' into complete-call-request

pull/932/head
kompotkot 2024-02-01 10:21:04 +00:00
commit 52fdbeede9
33 zmienionych plików z 770 dodań i 247 usunięć

Wyświetl plik

@ -39,7 +39,7 @@ chown ubuntu:ubuntu "/home/ubuntu/monitoring"
echo
echo
echo -e "${PREFIX_INFO} Retrieving monitoring deployment parameters"
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION}" /home/ubuntu/go/bin/checkenv show aws_ssm+crawlers:true,monitoring:true > "${PARAMETERS_ENV_MONITORING_PATH}"
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION}" /home/ubuntu/go/bin/checkenv show aws_ssm+service:true,monitoring:true > "${PARAMETERS_ENV_MONITORING_PATH}"
chmod 0640 "${PARAMETERS_ENV_MONITORING_PATH}"
echo
@ -49,7 +49,7 @@ echo "AWS_LOCAL_IPV4=$(ec2metadata --local-ipv4)" >> "${PARAMETERS_ENV_MONITORIN
echo
echo
echo -e "${PREFIX_INFO} Add AWS default region to monitring parameters"
echo -e "${PREFIX_INFO} Add AWS default region to monitoring parameters"
echo "AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION}" >> "${PARAMETERS_ENV_MONITORING_PATH}"
echo

Wyświetl plik

@ -47,8 +47,6 @@ ETHEREUM_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="ethereum-historical-crawl-events.ti
POLYGON_SYNCHRONIZE_SERVICE="polygon-synchronize.service"
POLYGON_MISSING_SERVICE_FILE="polygon-missing.service"
POLYGON_MISSING_TIMER_FILE="polygon-missing.timer"
POLYGON_STATISTICS_SERVICE_FILE="polygon-statistics.service"
POLYGON_STATISTICS_TIMER_FILE="polygon-statistics.timer"
POLYGON_MOONWORM_CRAWLER_SERVICE_FILE="polygon-moonworm-crawler.service"
POLYGON_STATE_SERVICE_FILE="polygon-state.service"
POLYGON_STATE_TIMER_FILE="polygon-state.timer"
@ -56,8 +54,6 @@ POLYGON_STATE_CLEAN_SERVICE_FILE="polygon-state-clean.service"
POLYGON_STATE_CLEAN_TIMER_FILE="polygon-state-clean.timer"
POLYGON_METADATA_SERVICE_FILE="polygon-metadata.service"
POLYGON_METADATA_TIMER_FILE="polygon-metadata.timer"
POLYGON_CU_REPORTS_TOKENONOMICS_SERVICE_FILE="polygon-cu-reports-tokenonomics.service"
POLYGON_CU_REPORTS_TOKENONOMICS_TIMER_FILE="polygon-cu-reports-tokenonomics.timer"
POLYGON_CU_NFT_DASHBOARD_SERVICE_FILE="polygon-cu-nft-dashboard.service"
POLYGON_CU_NFT_DASHBOARD_TIMER_FILE="polygon-cu-nft-dashboard.timer"
POLYGON_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="polygon-historical-crawl-transactions.service"
@ -86,8 +82,6 @@ MUMBAI_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="mumbai-historical-crawl-events.timer"
XDAI_SYNCHRONIZE_SERVICE="xdai-synchronize.service"
XDAI_MISSING_SERVICE_FILE="xdai-missing.service"
XDAI_MISSING_TIMER_FILE="xdai-missing.timer"
XDAI_STATISTICS_SERVICE_FILE="xdai-statistics.service"
XDAI_STATISTICS_TIMER_FILE="xdai-statistics.timer"
XDAI_MOONWORM_CRAWLER_SERVICE_FILE="xdai-moonworm-crawler.service"
XDai_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="xdai-historical-crawl-transactions.service"
XDai_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="xdai-historical-crawl-transactions.timer"
@ -98,8 +92,6 @@ XDai_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="xdai-historical-crawl-events.timer"
WYRM_SYNCHRONIZE_SERVICE="wyrm-synchronize.service"
WYRM_MISSING_SERVICE_FILE="wyrm-missing.service"
WYRM_MISSING_TIMER_FILE="wyrm-missing.timer"
WYRM_STATISTICS_SERVICE_FILE="wyrm-statistics.service"
WYRM_STATISTICS_TIMER_FILE="wyrm-statistics.timer"
WYRM_MOONWORM_CRAWLER_SERVICE_FILE="wyrm-moonworm-crawler.service"
WYRM_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="wyrm-historical-crawl-transactions.service"
WYRM_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="wyrm-historical-crawl-transactions.timer"
@ -115,6 +107,10 @@ ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="zksync-era-historical-cra
ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="zksync-era-historical-crawl-transactions.timer"
ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="zksync-era-historical-crawl-events.service"
ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="zksync-era-historical-crawl-events.timer"
ZKSYNC_ERA_STATE_SERVICE_FILE="zksync-era-state.service"
ZKSYNC_ERA_STATE_TIMER_FILE="zksync-era-state.timer"
ZKSYNC_ERA_STATE_CLEAN_SERVICE_FILE="zksync-era-state-clean.service"
ZKSYNC_ERA_STATE_CLEAN_TIMER_FILE="zksync-era-state-clean.timer"
# ZkSync Era testnet
ZKSYNC_ERA_TESTNET_SYNCHRONIZE_SERVICE="zksync-era-testnet-synchronize.service"
@ -252,14 +248,6 @@ cp "${SCRIPT_DIR}/${POLYGON_MISSING_TIMER_FILE}" "/home/ubuntu/.config/systemd/u
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${POLYGON_MISSING_TIMER_FILE}"
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Polygon statistics dashbord service and timer with: ${POLYGON_STATISTICS_SERVICE_FILE}, ${POLYGON_STATISTICS_TIMER_FILE}"
chmod 644 "${SCRIPT_DIR}/${POLYGON_STATISTICS_SERVICE_FILE}" "${SCRIPT_DIR}/${POLYGON_STATISTICS_TIMER_FILE}"
cp "${SCRIPT_DIR}/${POLYGON_STATISTICS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${POLYGON_STATISTICS_SERVICE_FILE}"
cp "${SCRIPT_DIR}/${POLYGON_STATISTICS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${POLYGON_STATISTICS_TIMER_FILE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${POLYGON_STATISTICS_TIMER_FILE}"
echo
echo
@ -296,14 +284,6 @@ cp "${SCRIPT_DIR}/${POLYGON_METADATA_TIMER_FILE}" "/home/ubuntu/.config/systemd/
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${POLYGON_METADATA_TIMER_FILE}"
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Polygon CU reports tokenonomics service and timer with: ${POLYGON_CU_REPORTS_TOKENONOMICS_SERVICE_FILE}, ${POLYGON_CU_REPORTS_TOKENONOMICS_TIMER_FILE}"
chmod 644 "${SCRIPT_DIR}/${POLYGON_CU_REPORTS_TOKENONOMICS_SERVICE_FILE}" "${SCRIPT_DIR}/${POLYGON_CU_REPORTS_TOKENONOMICS_TIMER_FILE}"
cp "${SCRIPT_DIR}/${POLYGON_CU_REPORTS_TOKENONOMICS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${POLYGON_CU_REPORTS_TOKENONOMICS_SERVICE_FILE}"
cp "${SCRIPT_DIR}/${POLYGON_CU_REPORTS_TOKENONOMICS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${POLYGON_CU_REPORTS_TOKENONOMICS_TIMER_FILE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${POLYGON_CU_REPORTS_TOKENONOMICS_TIMER_FILE}"
echo
echo
@ -419,14 +399,6 @@ cp "${SCRIPT_DIR}/${XDAI_MISSING_TIMER_FILE}" "/home/ubuntu/.config/systemd/user
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XDAI_MISSING_TIMER_FILE}"
echo
echo
echo -e "${PREFIX_INFO} Replacing existing XDai statistics dashbord service and timer with: ${XDAI_STATISTICS_SERVICE_FILE}, ${XDAI_STATISTICS_TIMER_FILE}"
chmod 644 "${SCRIPT_DIR}/${XDAI_STATISTICS_SERVICE_FILE}" "${SCRIPT_DIR}/${XDAI_STATISTICS_TIMER_FILE}"
cp "${SCRIPT_DIR}/${XDAI_STATISTICS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${XDAI_STATISTICS_SERVICE_FILE}"
cp "${SCRIPT_DIR}/${XDAI_STATISTICS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${XDAI_STATISTICS_TIMER_FILE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XDAI_STATISTICS_TIMER_FILE}"
echo
echo
@ -471,15 +443,6 @@ cp "${SCRIPT_DIR}/${WYRM_MISSING_TIMER_FILE}" "/home/ubuntu/.config/systemd/user
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${WYRM_MISSING_TIMER_FILE}"
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Wyrm statistics dashbord service and timer with: ${WYRM_STATISTICS_SERVICE_FILE}, ${WYRM_STATISTICS_TIMER_FILE}"
chmod 644 "${SCRIPT_DIR}/${WYRM_STATISTICS_SERVICE_FILE}" "${SCRIPT_DIR}/${WYRM_STATISTICS_TIMER_FILE}"
cp "${SCRIPT_DIR}/${WYRM_STATISTICS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${WYRM_STATISTICS_SERVICE_FILE}"
cp "${SCRIPT_DIR}/${WYRM_STATISTICS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${WYRM_STATISTICS_TIMER_FILE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${WYRM_STATISTICS_TIMER_FILE}"
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Wyrm moonworm crawler service definition with ${WYRM_MOONWORM_CRAWLER_SERVICE_FILE}"
@ -550,6 +513,23 @@ cp "${SCRIPT_DIR}/${ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}" "/home/ubunt
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
echo
echo
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era state service and timer with: ${ZKSYNC_ERA_STATE_SERVICE_FILE}, ${ZKSYNC_ERA_STATE_TIMER_FILE}"
chmod 644 "${SCRIPT_DIR}/${ZKSYNC_ERA_STATE_SERVICE_FILE}" "${SCRIPT_DIR}/${ZKSYNC_ERA_STATE_TIMER_FILE}"
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_STATE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_STATE_SERVICE_FILE}"
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_STATE_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_STATE_TIMER_FILE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_STATE_TIMER_FILE}"
echo
echo
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era state clean service and timer with: ${ZKSYNC_ERA_STATE_CLEAN_SERVICE_FILE}, ${ZKSYNC_ERA_STATE_CLEAN_TIMER_FILE}"
chmod 644 "${SCRIPT_DIR}/${ZKSYNC_ERA_STATE_CLEAN_SERVICE_FILE}" "${SCRIPT_DIR}/${ZKSYNC_ERA_STATE_CLEAN_TIMER_FILE}"
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_STATE_CLEAN_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_STATE_CLEAN_SERVICE_FILE}"
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_STATE_CLEAN_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_STATE_CLEAN_TIMER_FILE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_STATE_CLEAN_TIMER_FILE}"
# ZkSync Era testnet
echo

Wyświetl plik

@ -1,11 +0,0 @@
[Unit]
Description=Runs custom crawler for CU tokenonomics
After=network.target
[Service]
Type=oneshot
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.reports_crawler.cli cu-reports --moonstream-token "${MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN}" queries run-tokenonomics
CPUWeight=60
SyslogIdentifier=polygon-cu-reports-tokenonomics

Wyświetl plik

@ -1,9 +0,0 @@
[Unit]
Description=Runs custom crawler for CU tokenonomics
[Timer]
OnBootSec=60s
OnUnitActiveSec=60m
[Install]
WantedBy=timers.target

Wyświetl plik

@ -1,11 +0,0 @@
[Unit]
Description=Update Polygon statistics dashboards
After=network.target
[Service]
Type=oneshot
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.stats_worker.dashboard --access-id "${NB_CONTROLLER_ACCESS_ID}" generate --blockchain polygon
CPUWeight=60
SyslogIdentifier=polygon-statistics

Wyświetl plik

@ -1,9 +0,0 @@
[Unit]
Description=Update Polygon statistics dashboards each 6 hours
[Timer]
OnBootSec=20s
OnUnitActiveSec=6h
[Install]
WantedBy=timers.target

Wyświetl plik

@ -1,9 +0,0 @@
[Unit]
Description=Update Wyrm statistics dashboards each 6 hours
[Timer]
OnBootSec=25s
OnUnitActiveSec=6h
[Install]
WantedBy=timers.target

Wyświetl plik

@ -1,11 +0,0 @@
[Unit]
Description=Update XDai statistics dashboards
After=network.target
[Service]
Type=oneshot
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.stats_worker.dashboard --access-id "${NB_CONTROLLER_ACCESS_ID}" generate --blockchain xdai
CPUWeight=60
SyslogIdentifier=xdai-statistics

Wyświetl plik

@ -1,9 +0,0 @@
[Unit]
Description=Update XDai statistics dashboards each 6 hours
[Timer]
OnBootSec=25s
OnUnitActiveSec=6h
[Install]
WantedBy=timers.target

Wyświetl plik

@ -1,11 +1,11 @@
[Unit]
Description=Update Wyrm statistics dashboards
Description=Execute state clean labels crawler
After=network.target
[Service]
Type=oneshot
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.stats_worker.dashboard --access-id "${NB_CONTROLLER_ACCESS_ID}" generate --blockchain wyrm
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" clean-state-labels --blockchain zksync_era -N 10000
CPUWeight=60
SyslogIdentifier=wyrm-statistics
SyslogIdentifier=zksync-era-state-clean

Wyświetl plik

@ -0,0 +1,9 @@
[Unit]
Description=Execute Zksync Era state clean labels crawler each 25m
[Timer]
OnBootSec=50s
OnUnitActiveSec=25m
[Install]
WantedBy=timers.target

Wyświetl plik

@ -0,0 +1,11 @@
[Unit]
Description=Execute state crawler
After=network.target
[Service]
Type=oneshot
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" crawl-jobs --moonstream-token "${MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN}" --blockchain zksync_era --jobs-file /home/ubuntu/moonstream/crawlers/mooncrawl/mooncrawl/state_crawler/jobs/zksync-era-jobs.json
CPUWeight=60
SyslogIdentifier=zksync-era-state

Wyświetl plik

@ -0,0 +1,9 @@
[Unit]
Description=Execute Zksync Era state crawler each 10m
[Timer]
OnBootSec=15s
OnUnitActiveSec=10m
[Install]
WantedBy=timers.target

Wyświetl plik

@ -7,12 +7,14 @@ import uuid
import requests # type: ignore
from bugout.data import BugoutSearchResult
from .utils import get_results_for_moonstream_query
from .utils import get_results_for_moonstream_query, leaderboard_push_batch
from ..settings import (
MOONSTREAM_ADMIN_ACCESS_TOKEN,
MOONSTREAM_LEADERBOARD_GENERATOR_JOURNAL_ID,
MOONSTREAM_API_URL,
MOONSTREAM_ENGINE_URL,
MOONSTREAM_LEADERBOARD_GENERATOR_BATCH_SIZE,
MOONSTREAM_LEADERBOARD_GENERATOR_PUSH_TIMEOUT_SECONDS,
)
from ..settings import bugout_client as bc
@ -35,10 +37,15 @@ def handle_leaderboards(args: argparse.Namespace) -> None:
### get leaderboard journal
leaderboard_push_batch_size = args.leaderboard_push_batch_size
leaderboard_push_timeout_seconds = args.leaderboard_push_timeout_seconds
query = "#leaderboard #status:active"
if args.leaderboard_id: # way to run only one leaderboard
query += f" #leaderboard_id:{args.leaderboard_id}"
if args.leaderboard_id: # way to run only one leaderboard without status:active
query = f"#leaderboard #leaderboard_id:{args.leaderboard_id}"
try:
leaderboards = bc.search(
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
@ -116,26 +123,33 @@ def handle_leaderboards(args: argparse.Namespace) -> None:
"Content-Type": "application/json",
}
try:
leaderboard_api_response = requests.put(
leaderboard_push_api_url,
json=query_results["data"],
headers=leaderboard_api_headers,
timeout=10,
if len(query_results["data"]) > leaderboard_push_batch_size:
logger.info(
f"Pushing {len(query_results['data'])} scores to leaderboard {leaderboard_id} in batches of {leaderboard_push_batch_size}"
)
except Exception as e:
logger.error(
f"Could not push results to leaderboard API: {e} for leaderboard {leaderboard_id}"
leaderboard_push_batch(
leaderboard_id,
leaderboard_data,
query_results["data"],
leaderboard_api_headers,
leaderboard_push_batch_size,
timeout=leaderboard_push_timeout_seconds,
)
continue
try:
leaderboard_api_response.raise_for_status()
except requests.exceptions.HTTPError as http_error:
logger.error(
f"Could not push results to leaderboard API: {http_error.response.text} with status code {http_error.response.status_code}"
)
continue
else:
try:
leaderboard_api_response = requests.put(
leaderboard_push_api_url,
json=query_results["data"],
headers=leaderboard_api_headers,
timeout=leaderboard_push_timeout_seconds,
)
leaderboard_api_response.raise_for_status()
except requests.exceptions.HTTPError as http_error:
logger.error(
f"Could not push results to leaderboard API: {http_error.response.text} with status code {http_error.response.status_code}"
)
continue
### get leaderboard from leaderboard API
@ -213,6 +227,18 @@ def main():
required=True,
help="Moonstream Access Token to use for Moonstream Query API requests",
)
leaderboard_generator_parser.add_argument(
"--leaderboard-push-batch-size",
type=int,
default=MOONSTREAM_LEADERBOARD_GENERATOR_BATCH_SIZE,
help="Number of scores to push to leaderboard API at once",
)
leaderboard_generator_parser.add_argument(
"--leaderboard-push-timeout-seconds",
type=int,
default=MOONSTREAM_LEADERBOARD_GENERATOR_PUSH_TIMEOUT_SECONDS,
help="Timeout for leaderboard API requests",
)
leaderboard_generator_parser.set_defaults(func=handle_leaderboards)

Wyświetl plik

@ -3,12 +3,17 @@ import json
import logging
import os
import time
from typing import Any, Dict, Optional
from typing import Any, Dict, Optional, List
import requests # type: ignore
from ..settings import MOONSTREAM_API_URL
from ..settings import (
MOONSTREAM_API_URL,
MOONSTREAM_ENGINE_URL,
MOONSTREAM_LEADERBOARD_GENERATOR_BATCH_SIZE,
MOONSTREAM_LEADERBOARD_GENERATOR_PUSH_TIMEOUT_SECONDS,
)
logging.basicConfig()
@ -101,3 +106,115 @@ def get_results_for_moonstream_query(
keep_going = num_retries <= max_retries
return result
def get_data_from_url(url):
response = requests.get(url)
if response.status_code == 200:
return response.json()
else:
raise Exception(f"Failed to get data: HTTP {response.status_code}")
def send_data_to_endpoint(chunks, endpoint_url, headers, timeout=10):
for index, chunk in enumerate(chunks):
try:
logger.info(f"Pushing chunk {index} to leaderboard API")
response = requests.put(
endpoint_url, headers=headers, json=chunk, timeout=timeout
)
response.raise_for_status()
except requests.exceptions.HTTPError as http_error:
logger.error(
f"Could not push results to leaderboard API: {http_error.response.text} with status code {http_error.response.status_code}"
)
raise http_error
def leaderboard_push_batch(
leaderboard_id: str,
leaderboard_config: Dict[str, Any],
data: List[Dict[str, Any]],
headers: Dict[str, str],
batch_size: int = MOONSTREAM_LEADERBOARD_GENERATOR_BATCH_SIZE,
timeout: int = 10,
) -> None:
"""
Push leaderboard data to the leaderboard API in batches.
"""
## first step create leaderboard version
leaderboard_version_api_url = (
f"{MOONSTREAM_ENGINE_URL}/leaderboard/{leaderboard_id}/versions"
)
json_data = {
"publish": False,
}
leaderboard_api_response = requests.post(
leaderboard_version_api_url, json=json_data, headers=headers, timeout=10
)
try:
leaderboard_api_response.raise_for_status()
except requests.exceptions.HTTPError as http_error:
logger.error(
f"Could not create leaderboard version: {http_error.response.text} with status code {http_error.response.status_code}"
)
return
leaderboard_version_id = leaderboard_api_response.json()["version"]
## second step push data to leaderboard version
leaderboard_version_push_api_url = f"{MOONSTREAM_ENGINE_URL}/leaderboard/{leaderboard_id}/versions/{leaderboard_version_id}/scores?normalize_addresses={leaderboard_config['normalize_addresses']}&overwrite=false"
chunks = [data[x : x + batch_size] for x in range(0, len(data), batch_size)]
send_data_to_endpoint(
chunks, leaderboard_version_push_api_url, headers, timeout=timeout
)
## third step publish leaderboard version
leaderboard_version_publish_api_url = f"{MOONSTREAM_ENGINE_URL}/leaderboard/{leaderboard_id}/versions/{leaderboard_version_id}"
json_data = {
"publish": True,
}
try:
leaderboard_api_response = requests.put(
leaderboard_version_publish_api_url,
json=json_data,
headers=headers,
timeout=10,
)
leaderboard_api_response.raise_for_status()
except requests.exceptions.HTTPError as http_error:
logger.error(
f"Could not publish leaderboard version: {http_error.response.text} with status code {http_error.response.status_code}"
)
return
## delete leaderboard version -1
try:
leaderboard_version_delete_api_url = f"{MOONSTREAM_ENGINE_URL}/leaderboard/{leaderboard_id}/versions/{leaderboard_version_id - 1}"
leaderboard_api_response = requests.delete(
leaderboard_version_delete_api_url,
headers=headers,
timeout=timeout,
)
leaderboard_api_response.raise_for_status()
except requests.exceptions.HTTPError as http_error:
logger.error(
f"Could not delete leaderboard version: {http_error.response.text} with status code {http_error.response.status_code}"
)
return

Wyświetl plik

@ -241,6 +241,7 @@ multicall_contracts: Dict[AvailableBlockchainType, str] = {
AvailableBlockchainType.POLYGON: "0xc8E51042792d7405184DfCa245F2d27B94D013b6",
AvailableBlockchainType.MUMBAI: "0xe9939e7Ea7D7fb619Ac57f648Da7B1D425832631",
AvailableBlockchainType.ETHEREUM: "0x5BA1e12693Dc8F9c48aAD8770482f4739bEeD696",
AvailableBlockchainType.ZKSYNC_ERA: "0xF9cda624FBC7e059355ce98a31693d299FACd963",
}
@ -321,3 +322,7 @@ if MOONSTREAM_LEADERBOARD_GENERATOR_JOURNAL_ID == "":
raise ValueError(
"MOONSTREAM_LEADERBOARD_GENERATOR_JOURNAL_ID environment variable must be set"
)
MOONSTREAM_LEADERBOARD_GENERATOR_BATCH_SIZE = 12000
MOONSTREAM_LEADERBOARD_GENERATOR_PUSH_TIMEOUT_SECONDS = 60

Wyświetl plik

@ -0,0 +1,90 @@
[
{
"constant": true,
"inputs": [],
"name": "getReserves",
"outputs": [
{
"internalType": "uint",
"name": "",
"type": "uint256"
},
{
"internalType": "uint",
"name": "",
"type": "uint256"
}
],
"payable": false,
"address": "0x80115c708E12eDd42E504c1cD52Aea96C547c05c",
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "getReserves",
"outputs": [
{
"internalType": "uint112",
"name": "_reserve0",
"type": "uint112"
},
{
"internalType": "uint112",
"name": "_reserve1",
"type": "uint112"
},
{
"internalType": "uint32",
"name": "_blockTimestampLast",
"type": "uint32"
}
],
"payable": false,
"address": "0xb85feb6aF3412d690DFDA280b73EaED73a2315bC",
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "getReserves",
"outputs": [
{
"internalType": "uint112",
"name": "_reserve0",
"type": "uint112"
},
{
"internalType": "uint112",
"name": "_reserve1",
"type": "uint112"
},
{
"internalType": "uint32",
"name": "_blockTimestampLast",
"type": "uint32"
}
],
"payable": false,
"address": "0xDFAaB828f5F515E104BaaBa4d8D554DA9096f0e4",
"stateMutability": "view",
"type": "function"
},
{
"constant": true,
"inputs": [],
"name": "totalSupply",
"outputs": [
{
"name": "",
"type": "uint256"
}
],
"payable": false,
"stateMutability": "view",
"type": "function",
"address": "0xDFAaB828f5F515E104BaaBa4d8D554DA9096f0e4"
}
]

Wyświetl plik

@ -1,7 +1,7 @@
"""Live at for metatx
Revision ID: 6d07739cb13e
Revises: cc80e886e153
Revises: 71e888082a6d
Create Date: 2023-12-06 14:33:04.814144
"""
@ -11,7 +11,7 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '6d07739cb13e'
down_revision = 'cc80e886e153'
down_revision = '71e888082a6d'
branch_labels = None
depends_on = None

Wyświetl plik

@ -0,0 +1,50 @@
"""leaderboard metadata
Revision ID: 71e888082a6d
Revises: cc80e886e153
Create Date: 2023-11-15 13:21:16.108399
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "71e888082a6d"
down_revision = "cc80e886e153"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column(
"leaderboards",
sa.Column(
"blockchain_ids",
sa.ARRAY(sa.Integer()),
nullable=False,
server_default="{}",
),
)
op.add_column(
"leaderboards",
sa.Column(
"wallet_connect", sa.Boolean(), nullable=False, server_default="false"
),
)
op.add_column(
"leaderboards",
sa.Column(
"columns_names", postgresql.JSONB(astext_type=sa.Text()), nullable=True
),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("leaderboards", "columns_names")
op.drop_column("leaderboards", "wallet_connect")
op.drop_column("leaderboards", "blockchain_ids")
# ### end Alembic commands ###

Wyświetl plik

@ -5,7 +5,13 @@ from typing import List, Any, Optional, Dict, Union, Tuple, cast
import uuid
import logging
from bugout.data import BugoutResource, BugoutSearchResult
from bugout.data import (
BugoutResource,
BugoutSearchResult,
ResourcePermissions,
HolderType,
BugoutResourceHolder,
)
from eth_typing import Address
from hexbytes import HexBytes
import requests # type: ignore
@ -16,7 +22,14 @@ from sqlalchemy.engine import Row
from web3 import Web3
from web3.types import ChecksumAddress
from .data import Score, LeaderboardScore, LeaderboardConfigUpdate, LeaderboardConfig
from .data import (
Score,
LeaderboardScore,
LeaderboardConfigUpdate,
LeaderboardConfig,
LeaderboardPosition,
ColumnsNames,
)
from .contracts import Dropper_interface, ERC20_interface, Terminus_interface
from .models import (
DropperClaimant,
@ -96,6 +109,10 @@ class LeaderboardVersionNotFound(Exception):
pass
class LeaderboardAssignResourceError(Exception):
pass
BATCH_SIGNATURE_PAGE_SIZE = 500
logger = logging.getLogger(__name__)
@ -1029,7 +1046,7 @@ def get_leaderboard_info(
version_number=version_number,
)
leaderboard = (
query = (
db_session.query(
Leaderboard.id,
Leaderboard.title,
@ -1038,26 +1055,33 @@ def get_leaderboard_info(
func.max(LeaderboardScores.updated_at).label("last_update"),
)
.join(
LeaderboardScores,
LeaderboardScores.leaderboard_id == Leaderboard.id,
LeaderboardVersion,
and_(
LeaderboardVersion.leaderboard_id == Leaderboard.id,
LeaderboardVersion.published == True,
),
isouter=True,
)
.join(
LeaderboardVersion,
LeaderboardScores,
and_(
LeaderboardVersion.leaderboard_id == LeaderboardScores.leaderboard_id,
LeaderboardVersion.version_number
== LeaderboardScores.leaderboard_version_number,
LeaderboardScores.leaderboard_id == Leaderboard.id,
LeaderboardScores.leaderboard_version_number
== LeaderboardVersion.version_number,
),
isouter=True,
)
.filter(
LeaderboardVersion.published == True,
LeaderboardVersion.version_number == latest_version,
or_(
LeaderboardVersion.published == None,
LeaderboardVersion.version_number == latest_version,
)
)
.filter(Leaderboard.id == leaderboard_id)
.group_by(Leaderboard.id, Leaderboard.title, Leaderboard.description)
).one()
)
leaderboard = query.one()
return leaderboard
@ -1222,9 +1246,46 @@ def get_position(
return query.all()
def get_leaderboard_positions(
def get_leaderboard_score(
db_session: Session,
leaderboard_id,
address,
version_number: Optional[int] = None,
) -> Optional[LeaderboardScores]:
"""
Return address score
"""
latest_version = leaderboard_version_filter(
db_session=db_session,
leaderboard_id=leaderboard_id,
version_number=version_number,
)
query = (
db_session.query(LeaderboardScores)
.join(
LeaderboardVersion,
and_(
LeaderboardVersion.leaderboard_id == LeaderboardScores.leaderboard_id,
LeaderboardVersion.version_number
== LeaderboardScores.leaderboard_version_number,
),
)
.filter(
LeaderboardVersion.published == True,
LeaderboardVersion.version_number == latest_version,
)
.filter(LeaderboardScores.leaderboard_id == leaderboard_id)
.filter(LeaderboardScores.address == address)
)
return query.one_or_none()
def get_leaderboard_positions(
db_session: Session,
leaderboard_id: uuid.UUID,
limit: int,
offset: int,
version_number: Optional[int] = None,
@ -1437,31 +1498,47 @@ def create_leaderboard(
title: str,
description: Optional[str],
token: Optional[Union[uuid.UUID, str]] = None,
wallet_connect: bool = False,
blockchain_ids: List[int] = [],
columns_names: ColumnsNames = None,
) -> Leaderboard:
"""
Create a leaderboard
"""
if columns_names is not None:
columns_names = columns_names.dict()
if not token:
token = uuid.UUID(MOONSTREAM_ADMIN_ACCESS_TOKEN)
try:
leaderboard = Leaderboard(title=title, description=description)
# deduplicate and sort
blockchain_ids = sorted(list(set(blockchain_ids)))
leaderboard = Leaderboard(
title=title,
description=description,
wallet_connect=wallet_connect,
blockchain_ids=blockchain_ids,
columns_names=columns_names,
)
db_session.add(leaderboard)
db_session.commit()
user = None
if token is not None:
user = bc.get_user(token=token)
resource = create_leaderboard_resource(
leaderboard_id=str(leaderboard.id),
token=token,
user_id=str(user.id) if user is not None else None,
)
leaderboard.resource_id = resource.id
db_session.commit()
except Exception as e:
db_session.rollback()
logger.error(f"Error creating leaderboard: {e}")
raise LeaderboardCreateError(f"Error creating leaderboard: {e}")
return leaderboard
@ -1504,6 +1581,9 @@ def update_leaderboard(
leaderboard_id: uuid.UUID,
title: Optional[str],
description: Optional[str],
wallet_connect: Optional[bool],
blockchain_ids: Optional[List[int]],
columns_names: Optional[ColumnsNames],
) -> Leaderboard:
"""
Update a leaderboard
@ -1517,6 +1597,23 @@ def update_leaderboard(
leaderboard.title = title
if description is not None:
leaderboard.description = description
if wallet_connect is not None:
leaderboard.wallet_connect = wallet_connect
if blockchain_ids is not None:
# deduplicate and sort
blockchain_ids = sorted(list(set(blockchain_ids)))
leaderboard.blockchain_ids = blockchain_ids
if columns_names is not None:
if leaderboard.columns_names is not None:
current_columns_names = ColumnsNames(**leaderboard.columns_names)
for key, value in columns_names.dict(exclude_none=True).items():
setattr(current_columns_names, key, value)
else:
current_columns_names = columns_names
leaderboard.columns_names = current_columns_names.dict()
db_session.commit()
@ -1615,38 +1712,62 @@ def add_scores(
# leaderboard access actions
def create_leaderboard_resource(
leaderboard_id: str, token: Union[Optional[uuid.UUID], str] = None
) -> BugoutResource:
def create_leaderboard_resource(leaderboard_id: str, user_id: Optional[str] = None):
resource_data: Dict[str, Any] = {
"type": LEADERBOARD_RESOURCE_TYPE,
"leaderboard_id": leaderboard_id,
}
if token is None:
token = MOONSTREAM_ADMIN_ACCESS_TOKEN
try:
resource = bc.create_resource(
token=token,
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
application_id=MOONSTREAM_APPLICATION_ID,
resource_data=resource_data,
timeout=10,
)
except Exception as e:
raise LeaderboardCreateError(f"Error creating leaderboard resource: {e}")
if user_id is not None:
try:
bc.add_resource_holder_permissions(
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
resource_id=resource.id,
holder_permissions=BugoutResourceHolder(
holder_type=HolderType.user,
holder_id=user_id,
permissions=[
ResourcePermissions.ADMIN,
ResourcePermissions.READ,
ResourcePermissions.UPDATE,
ResourcePermissions.DELETE,
],
),
)
except Exception as e:
raise LeaderboardCreateError(
f"Error adding resource holder permissions: {e}"
)
return resource
def assign_resource(
db_session: Session,
leaderboard_id: uuid.UUID,
user_token: Union[uuid.UUID, str],
user_token: Optional[Union[uuid.UUID, str]] = None,
resource_id: Optional[uuid.UUID] = None,
):
"""
Assign a resource handler to a leaderboard
"""
### get user_name from token
user = None
if user_token is not None:
user = bc.get_user(token=user_token)
leaderboard = (
db_session.query(Leaderboard).filter(Leaderboard.id == leaderboard_id).one() # type: ignore
)
@ -1654,11 +1775,9 @@ def assign_resource(
if resource_id is not None:
leaderboard.resource_id = resource_id
else:
# Create resource via admin token
resource = create_leaderboard_resource(
leaderboard_id=str(leaderboard_id),
token=user_token,
user_id=user.id if user is not None else None,
)
leaderboard.resource_id = resource.id

Wyświetl plik

@ -372,11 +372,22 @@ class LeaderboardScore(BaseModel):
points_data: Dict[str, Any]
class ColumnsNames(BaseModel):
rank: Optional[str] = None
address: Optional[str] = None
score: Optional[str] = None
points_data: Optional[str] = None
points_data_fields: Optional[Dict[str, str]] = None
class Leaderboard(BaseModel):
    # API representation of a leaderboard record.
    id: UUID
    title: str
    description: Optional[str] = None
    # Bugout resource backing access control for this leaderboard, if any.
    resource_id: Optional[UUID] = None
    # Whether wallet connection is required/enabled for this leaderboard.
    wallet_connect: bool = False
    # Chain ids this leaderboard is associated with; empty list by default.
    blockchain_ids: List[int] = Field(default_factory=list)
    # Optional custom column labels for presentation.
    columns_names: Optional[ColumnsNames] = None
    created_at: datetime
    updated_at: datetime
@ -392,6 +403,9 @@ class LeaderboardInfoResponse(BaseModel):
class LeaderboardCreateRequest(BaseModel):
    # Request payload for creating a new leaderboard.
    title: str
    description: Optional[str] = None
    wallet_connect: bool = False
    blockchain_ids: List[int] = Field(default_factory=list)
    columns_names: Optional[ColumnsNames] = None
class LeaderboardCreatedResponse(BaseModel):
@ -399,6 +413,9 @@ class LeaderboardCreatedResponse(BaseModel):
title: str
description: Optional[str] = None
resource_id: Optional[UUID] = None
wallet_connect: bool = False
blockchain_ids: List[int] = Field(default_factory=list)
columns_names: Optional[ColumnsNames] = None
created_at: datetime
updated_at: datetime
@ -411,6 +428,9 @@ class LeaderboardUpdatedResponse(BaseModel):
title: str
description: Optional[str] = None
resource_id: Optional[UUID] = None
wallet_connect: bool = False
blockchain_ids: List[int] = Field(default_factory=list)
columns_names: Optional[ColumnsNames] = None
created_at: datetime
updated_at: datetime
@ -421,6 +441,9 @@ class LeaderboardUpdatedResponse(BaseModel):
class LeaderboardUpdateRequest(BaseModel):
    # Request payload for updating an existing leaderboard.
    # Unset title/description mean "leave unchanged".
    title: Optional[str] = None
    description: Optional[str] = None
    # NOTE(review): wallet_connect defaults to False rather than None, so a
    # partial update that omits it may reset the flag — confirm intended.
    wallet_connect: bool = False
    blockchain_ids: List[int] = Field(default_factory=list)
    columns_names: Optional[ColumnsNames] = None
class LeaderboardDeletedResponse(BaseModel):
@ -428,6 +451,9 @@ class LeaderboardDeletedResponse(BaseModel):
title: str
description: Optional[str] = None
resource_id: Optional[UUID] = None
wallet_connect: bool = False
blockchain_ids: List[int] = Field(default_factory=list)
columns_names: Optional[ColumnsNames] = None
created_at: datetime
updated_at: datetime

Wyświetl plik

@ -1,6 +1,7 @@
import uuid
from sqlalchemy import (
ARRAY,
DECIMAL,
VARCHAR,
BigInteger,
@ -337,7 +338,6 @@ class CallRequest(Base):
class Leaderboard(Base): # type: ignore
__tablename__ = "leaderboards"
# __table_args__ = (UniqueConstraint("dropper_contract_id", "address"),)
id = Column(
UUID(as_uuid=True),
@ -349,6 +349,10 @@ class Leaderboard(Base): # type: ignore
title = Column(VARCHAR(128), nullable=False)
description = Column(String, nullable=True)
resource_id = Column(UUID(as_uuid=True), nullable=True, index=True)
blockchain_ids = Column(ARRAY(Integer), nullable=False, default=[])
wallet_connect = Column(Boolean, default=False, nullable=False)
columns_names = Column(JSONB, nullable=True)
created_at = Column(
DateTime(timezone=True), server_default=utcnow(), nullable=False
)

Wyświetl plik

@ -2,7 +2,7 @@
Leaderboard API.
"""
import logging
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, Any, Union
from uuid import UUID
from bugout.exceptions import BugoutResponseException
@ -60,6 +60,7 @@ leaderboard_whitelist = {
"/leaderboard/": "GET",
"/leaderboard/rank": "GET",
"/leaderboard/ranks": "GET",
"/leaderboard/scores": "GET",
"/scores/changes": "GET",
"/leaderboard/docs": "GET",
"/leaderboard/openapi.json": "GET",
@ -91,9 +92,12 @@ app.add_middleware(
"",
response_model=List[data.LeaderboardPosition],
tags=["Public Endpoints"],
include_in_schema=False,
)
@app.get("/", response_model=List[data.LeaderboardPosition], tags=["Public Endpoints"])
@app.get(
"/",
response_model=List[data.LeaderboardPosition],
tags=["Public Endpoints"],
)
async def leaderboard(
leaderboard_id: UUID = Query(..., description="Leaderboard ID"),
limit: int = Query(10),
@ -107,7 +111,7 @@ async def leaderboard(
### Check if leaderboard exists
try:
actions.get_leaderboard_by_id(db_session, leaderboard_id)
leaderboard = actions.get_leaderboard_by_id(db_session, leaderboard_id)
except NoResultFound as e:
raise EngineHTTPException(
status_code=404,
@ -118,8 +122,9 @@ async def leaderboard(
raise EngineHTTPException(status_code=500, detail="Internal server error")
leaderboard_positions = actions.get_leaderboard_positions(
db_session, leaderboard_id, limit, offset, version
db_session, leaderboard.id, limit, offset, version
)
result = [
data.LeaderboardPosition(
address=position.address,
@ -149,7 +154,6 @@ async def create_leaderboard(
Authorization: str = AuthHeader,
) -> data.LeaderboardCreatedResponse:
"""
Create leaderboard.
"""
@ -161,6 +165,9 @@ async def create_leaderboard(
title=leaderboard.title,
description=leaderboard.description,
token=token,
wallet_connect=leaderboard.wallet_connect,
blockchain_ids=leaderboard.blockchain_ids,
columns_names=leaderboard.columns_names,
)
except actions.LeaderboardCreateError as e:
logger.error(f"Error while creating leaderboard: {e}")
@ -176,12 +183,15 @@ async def create_leaderboard(
# Add resource to the leaderboard
return data.LeaderboardCreatedResponse(
id=created_leaderboard.id, # type: ignore
title=created_leaderboard.title, # type: ignore
description=created_leaderboard.description, # type: ignore
resource_id=created_leaderboard.resource_id, # type: ignore
created_at=created_leaderboard.created_at, # type: ignore
updated_at=created_leaderboard.updated_at, # type: ignore
id=created_leaderboard.id,
title=created_leaderboard.title,
description=created_leaderboard.description,
resource_id=created_leaderboard.resource_id,
wallet_connect=created_leaderboard.wallet_connect,
blockchain_ids=created_leaderboard.blockchain_ids,
columns_names=created_leaderboard.columns_names,
created_at=created_leaderboard.created_at,
updated_at=created_leaderboard.updated_at,
)
@ -225,6 +235,9 @@ async def update_leaderboard(
leaderboard_id=leaderboard_id,
title=leaderboard.title,
description=leaderboard.description,
wallet_connect=leaderboard.wallet_connect,
blockchain_ids=leaderboard.blockchain_ids,
columns_names=leaderboard.columns_names,
)
except actions.LeaderboardUpdateError as e:
logger.error(f"Error while updating leaderboard: {e}")
@ -238,12 +251,15 @@ async def update_leaderboard(
raise EngineHTTPException(status_code=500, detail="Internal server error")
return data.LeaderboardUpdatedResponse(
id=updated_leaderboard.id, # type: ignore
title=updated_leaderboard.title, # type: ignore
description=updated_leaderboard.description, # type: ignore
resource_id=updated_leaderboard.resource_id, # type: ignore
created_at=updated_leaderboard.created_at, # type: ignore
updated_at=updated_leaderboard.updated_at, # type: ignore
id=updated_leaderboard.id,
title=updated_leaderboard.title,
description=updated_leaderboard.description,
resource_id=updated_leaderboard.resource_id,
wallet_connect=updated_leaderboard.wallet_connect,
blockchain_ids=updated_leaderboard.blockchain_ids,
columns_names=updated_leaderboard.columns_names,
created_at=updated_leaderboard.created_at,
updated_at=updated_leaderboard.updated_at,
)
@ -298,11 +314,15 @@ async def delete_leaderboard(
raise EngineHTTPException(status_code=500, detail="Internal server error")
return data.LeaderboardDeletedResponse(
id=deleted_leaderboard.id, # type: ignore
title=deleted_leaderboard.title, # type: ignore
description=deleted_leaderboard.description, # type: ignore
created_at=deleted_leaderboard.created_at, # type: ignore
updated_at=deleted_leaderboard.updated_at, # type: ignore
id=deleted_leaderboard.id,
title=deleted_leaderboard.title,
description=deleted_leaderboard.description,
resource_id=deleted_leaderboard.resource_id,
wallet_connect=deleted_leaderboard.wallet_connect,
blockchain_ids=deleted_leaderboard.blockchain_ids,
columns_names=deleted_leaderboard.columns_names,
created_at=deleted_leaderboard.created_at,
updated_at=deleted_leaderboard.updated_at,
)
@ -335,12 +355,15 @@ async def get_leaderboards(
results = [
data.Leaderboard(
id=leaderboard.id, # type: ignore
title=leaderboard.title, # type: ignore
description=leaderboard.description, # type: ignore
resource_id=leaderboard.resource_id, # type: ignore
created_at=leaderboard.created_at, # type: ignore
updated_at=leaderboard.updated_at, # type: ignore
id=leaderboard.id,
title=leaderboard.title,
description=leaderboard.description,
resource_id=leaderboard.resource_id,
wallet_connect=leaderboard.wallet_connect,
blockchain_ids=leaderboard.blockchain_ids,
columns_names=leaderboard.columns_names,
created_at=leaderboard.created_at,
updated_at=leaderboard.updated_at,
)
for leaderboard in leaderboards
]
@ -452,7 +475,7 @@ async def quartiles(
"""
### Check if leaderboard exists
try:
actions.get_leaderboard_by_id(db_session, leaderboard_id)
leaderboard = actions.get_leaderboard_by_id(db_session, leaderboard_id)
except NoResultFound as e:
raise EngineHTTPException(
status_code=404,
@ -471,12 +494,14 @@ async def quartiles(
logger.error(f"Error while getting quartiles: {e}")
raise EngineHTTPException(status_code=500, detail="Internal server error")
return data.QuartilesResponse(
percentile_25={"address": q1[0], "score": q1[1], "rank": q1[2]},
percentile_50={"address": q2[0], "score": q2[1], "rank": q2[2]},
percentile_75={"address": q3[0], "score": q3[1], "rank": q3[2]},
result = data.QuartilesResponse(
percentile_25={"address": q1.address, "rank": q1.rank, "score": q1.score},
percentile_50={"address": q2.address, "rank": q2.rank, "score": q2.score},
percentile_75={"address": q3.address, "rank": q3.rank, "score": q3.score},
)
return result
@app.get(
"/position",
@ -502,7 +527,7 @@ async def position(
### Check if leaderboard exists
try:
actions.get_leaderboard_by_id(db_session, leaderboard_id)
leaderboard = actions.get_leaderboard_by_id(db_session, leaderboard_id)
except NoResultFound as e:
raise EngineHTTPException(
status_code=404,
@ -539,7 +564,9 @@ async def position(
@app.get(
"/rank", response_model=List[data.LeaderboardPosition], tags=["Public Endpoints"]
"/rank",
response_model=List[data.LeaderboardPosition],
tags=["Public Endpoints"],
)
async def rank(
leaderboard_id: UUID = Query(..., description="Leaderboard ID"),
@ -555,7 +582,7 @@ async def rank(
### Check if leaderboard exists
try:
actions.get_leaderboard_by_id(db_session, leaderboard_id)
leaderboard = actions.get_leaderboard_by_id(db_session, leaderboard_id)
except NoResultFound as e:
raise EngineHTTPException(
status_code=404,
@ -573,14 +600,15 @@ async def rank(
offset=offset,
version_number=version,
)
results = [
data.LeaderboardPosition(
address=rank_position.address,
score=rank_position.score,
rank=rank_position.rank,
points_data=rank_position.points_data,
address=position.address,
score=position.score,
rank=position.rank,
points_data=position.points_data,
)
for rank_position in leaderboard_rank
for position in leaderboard_rank
]
return results
@ -619,6 +647,57 @@ async def ranks(
return results
@app.get(
    "/scores",
    response_model=data.LeaderboardScore,
    tags=["Public Endpoints"],
)
async def leaderboard_score(
    address: str = Query(..., description="Address to get position for."),
    leaderboard_id: UUID = Query(..., description="Leaderboard ID"),
    version: Optional[int] = Query(None, description="Version of the leaderboard."),
    normalize_addresses: bool = Query(
        True, description="Normalize addresses to checksum."
    ),
    db_session: Session = Depends(db.yield_db_session),
) -> data.LeaderboardScore:
    """
    Returns the leaderboard score for the given address.
    """

    # Ensure the leaderboard exists before looking up scores.
    try:
        actions.get_leaderboard_by_id(db_session, leaderboard_id)
    except NoResultFound as e:
        raise EngineHTTPException(
            status_code=404,
            detail="Leaderboard not found.",
        )
    except Exception as e:
        logger.error(f"Error while getting leaderboard: {e}")
        raise EngineHTTPException(status_code=500, detail="Internal server error")

    if normalize_addresses:
        # Convert to checksum form so the lookup matches stored addresses.
        address = Web3.toChecksumAddress(address)

    score = actions.get_leaderboard_score(
        db_session,
        leaderboard_id,
        address,
        version,
    )

    if score is None:
        # NOTE(review): raising 204 with a detail body is unusual (204 has no
        # body) — confirm this is the intended "no score" signal.
        raise EngineHTTPException(status_code=204, detail="Score not found.")

    return data.LeaderboardScore(
        leaderboard_id=score.leaderboard_id,
        address=score.address,
        score=score.score,
        points_data=score.points_data,
    )
@app.put(
"/{leaderboard_id}/scores",
response_model=List[data.LeaderboardScore],

Wyświetl plik

@ -7,7 +7,7 @@ base58==2.1.1
bitarray==2.7.6
boto3==1.27.0
botocore==1.30.0
bugout==0.2.14
bugout==0.2.15
certifi==2023.5.7
charset-normalizer==3.1.0
click==8.1.3

Wyświetl plik

@ -13,7 +13,7 @@ setup(
packages=find_packages(),
install_requires=[
"boto3",
"bugout>=0.2.14",
"bugout>=0.2.15",
"eip712==0.1.0",
"eth-typing>=2.3.0",
"fastapi",

Wyświetl plik

@ -15,6 +15,9 @@ from bugout.data import (
BugoutResources,
BugoutSearchResult,
BugoutSearchResults,
BugoutResourceHolder,
HolderType,
ResourcePermissions,
)
from bugout.exceptions import BugoutResponseException
from bugout.journal import SearchOrder
@ -711,11 +714,7 @@ def generate_journal_for_user(
}
try:
bc.create_resource(
token=token,
application_id=MOONSTREAM_APPLICATION_ID,
resource_data=resource_data,
)
create_resource_for_user(user_id=user_id, resource_data=resource_data)
except BugoutResponseException as e:
raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
except Exception as e:
@ -851,6 +850,8 @@ def get_list_of_support_interfaces(
Returns list of interfaces supported by given address
"""
result = {}
try:
_, _, is_contract = check_if_smart_contract(
blockchain_type=blockchain_type, address=address, user_token=user_token
@ -866,8 +867,6 @@ def get_list_of_support_interfaces(
abi=supportsInterface_abi,
)
result = {}
if blockchain_type in multicall_contracts:
calls = []
@ -952,3 +951,57 @@ def check_if_smart_contract(
is_contract = True
return blockchain_type, address, is_contract
def create_resource_for_user(
    user_id: uuid.UUID,
    resource_data: Dict[str, Any],
) -> BugoutResource:
    """
    Create a Bugout resource on behalf of a user.

    The resource is created with the admin token, then the user is granted
    full holder permissions on it. If granting permissions fails, the
    freshly created resource is deleted so no orphaned resource remains.

    :param user_id: Bugout user id to attach as resource holder.
    :param resource_data: Arbitrary resource payload.
    :raises MoonstreamHTTPException: on any Bugout or unexpected failure.
    :return: The created Bugout resource.
    """
    try:
        resource = bc.create_resource(
            token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
            application_id=MOONSTREAM_APPLICATION_ID,
            resource_data=resource_data,
            timeout=BUGOUT_REQUEST_TIMEOUT_SECONDS,
        )
    except BugoutResponseException as e:
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        logger.error(f"Error creating resource: {str(e)}")
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    try:
        bc.add_resource_holder_permissions(
            token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
            resource_id=resource.id,
            holder_permissions=BugoutResourceHolder(
                holder_type=HolderType.user,
                holder_id=user_id,
                permissions=[
                    ResourcePermissions.ADMIN,
                    ResourcePermissions.READ,
                    ResourcePermissions.UPDATE,
                    ResourcePermissions.DELETE,
                ],
            ),
            timeout=BUGOUT_REQUEST_TIMEOUT_SECONDS,
        )
    except BugoutResponseException as e:
        # Fixed duplicated word ("resource resource") in the original message.
        logger.error(
            f"Error adding resource holder permissions to resource {str(resource.id)} {str(e)}"
        )
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        # Roll back: remove the resource so it is not left without its holder.
        bc.delete_resource(
            token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
            resource_id=resource.id,
        )
        logger.error(
            f"Error adding resource holder permissions to resource {str(resource.id)} {str(e)}"
        )
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    return resource

Wyświetl plik

@ -25,13 +25,14 @@ from ..actions import (
get_query_by_name,
name_normalization,
query_parameter_hash,
create_resource_for_user,
)
from ..middleware import MoonstreamHTTPException
from ..settings import (
MOONSTREAM_ADMIN_ACCESS_TOKEN,
MOONSTREAM_APPLICATION_ID,
MOONSTREAM_CRAWLERS_SERVER_PORT,
MOONSTREAM_CRAWLERS_SERVER_URL,
MOONSTREAM_INTERNAL_REQUEST_TIMEOUT_SECONDS,
MOONSTREAM_QUERIES_JOURNAL_ID,
MOONSTREAM_QUERY_TEMPLATE_CONTEXT_TYPE,
MOONSTREAM_S3_QUERIES_BUCKET,
@ -129,24 +130,16 @@ async def create_query_handler(
except Exception as e:
raise MoonstreamHTTPException(status_code=500, internal_error=e)
try:
# create resource query_name_resolver
bc.create_resource(
token=token,
application_id=MOONSTREAM_APPLICATION_ID,
resource_data={
"type": data.BUGOUT_RESOURCE_QUERY_RESOLVER,
"user_id": str(user.id),
"user": str(user.username),
"name": query_name,
"entry_id": str(entry.id),
},
)
except BugoutResponseException as e:
logger.error(f"Error creating name resolving resource: {str(e)}")
raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
except Exception as e:
raise MoonstreamHTTPException(status_code=500, internal_error=e)
create_resource_for_user(
user_id=user.id,
resource_data={
"type": data.BUGOUT_RESOURCE_QUERY_RESOLVER,
"user_id": str(user.id),
"user": str(user.username),
"name": query_name,
"entry_id": str(entry.id),
},
)
try:
bc.update_tags(
@ -354,7 +347,7 @@ async def update_query_handler(
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
journal_id=MOONSTREAM_QUERIES_JOURNAL_ID,
entry_id=query_id,
title=query_name,
title=f"Query:{query_name}",
content=request_update.query,
tags=["preapprove"],
)
@ -473,7 +466,7 @@ async def update_query_data_handler(
if request_update.blockchain
else None,
},
timeout=5,
timeout=MOONSTREAM_INTERNAL_REQUEST_TIMEOUT_SECONDS,
)
except Exception as e:
logger.error(f"Error interaction with crawlers: {str(e)}")
@ -619,7 +612,9 @@ async def remove_query_handler(
raise MoonstreamHTTPException(status_code=404, detail="Query does not exists")
try:
bc.delete_resource(token=token, resource_id=query_ids[query_name][0])
bc.delete_resource(
token=MOONSTREAM_ADMIN_ACCESS_TOKEN, resource_id=query_ids[query_name][0]
)
except BugoutResponseException as e:
raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
except Exception as e:

Wyświetl plik

@ -259,3 +259,17 @@ supportsInterface_abi = [
"type": "function",
}
]
# Timeout (seconds) for internal service-to-service requests.
# Defaults to 10 and may be overridden via the environment variable
# MOONSTREAM_INTERNAL_REQUEST_TIMEOUT_SECONDS.
MOONSTREAM_INTERNAL_REQUEST_TIMEOUT_SECONDS_RAW = os.environ.get(
    "MOONSTREAM_INTERNAL_REQUEST_TIMEOUT_SECONDS"
)
MOONSTREAM_INTERNAL_REQUEST_TIMEOUT_SECONDS = 10
try:
    if MOONSTREAM_INTERNAL_REQUEST_TIMEOUT_SECONDS_RAW is not None:
        MOONSTREAM_INTERNAL_REQUEST_TIMEOUT_SECONDS = int(
            MOONSTREAM_INTERNAL_REQUEST_TIMEOUT_SECONDS_RAW
        )
# Narrowed from a bare `except:` — only parse failures should be reported,
# not KeyboardInterrupt/SystemExit.
except (TypeError, ValueError):
    raise Exception(
        f"Could not parse MOONSTREAM_INTERNAL_REQUEST_TIMEOUT_SECONDS as int: {MOONSTREAM_INTERNAL_REQUEST_TIMEOUT_SECONDS_RAW}"
    )

Wyświetl plik

@ -9,7 +9,7 @@ base58==2.1.1
bitarray==2.6.0
boto3==1.26.5
botocore==1.29.5
bugout>=0.2.13
bugout>=0.2.15
certifi==2022.9.24
charset-normalizer==2.1.1
click==8.1.3

Wyświetl plik

@ -13,7 +13,7 @@ setup(
install_requires=[
"appdirs",
"boto3",
"bugout>=0.2.13",
"bugout>=0.2.15",
"fastapi",
"moonstreamdb>=0.3.5",
"humbug",

Wyświetl plik

@ -198,7 +198,7 @@ func (bpool *BlockchainPool) HealthCheck() {
for _, b := range bpool.Blockchains {
var timeout time.Duration
getLatestBlockReq := `{"jsonrpc":"2.0","method":"eth_getBlockByNumber","params":["latest", false],"id":1}`
if b.Blockchain == "starknet" {
if b.Blockchain == "starknet" || b.Blockchain == "starknet-goerli" {
getLatestBlockReq = `{"jsonrpc":"2.0","method":"starknet_getBlockWithTxHashes","params":["latest"],"id":"0"}`
timeout = NB_HEALTH_CHECK_CALL_TIMEOUT * 2
}
@ -241,7 +241,7 @@ func (bpool *BlockchainPool) HealthCheck() {
}
var blockNumber uint64
if b.Blockchain == "starknet" {
if b.Blockchain == "starknet" || b.Blockchain == "starknet-goerli" {
blockNumber = statusResponse.Result.BlockNumber
} else {
blockNumberHex := strings.Replace(statusResponse.Result.Number, "0x", "", -1)

Wyświetl plik

@ -38,7 +38,7 @@ var (
NB_CONNECTION_RETRIES = 2
NB_CONNECTION_RETRIES_INTERVAL = time.Millisecond * 10
NB_HEALTH_CHECK_INTERVAL = time.Millisecond * 5000
NB_HEALTH_CHECK_INTERVAL = os.Getenv("NB_HEALTH_CHECK_INTERVAL")
NB_HEALTH_CHECK_CALL_TIMEOUT = time.Second * 2
NB_CACHE_CLEANING_INTERVAL = time.Second * 10

Wyświetl plik

@ -12,6 +12,7 @@ import (
"net/http/httputil"
"net/url"
"os"
"strconv"
"strings"
"time"
@ -28,7 +29,11 @@ var (
// initHealthCheck runs a routine for check status of the nodes every 5 seconds
func initHealthCheck(debug bool) {
t := time.NewTicker(NB_HEALTH_CHECK_INTERVAL)
healthCheckInterval, convErr := strconv.Atoi(NB_HEALTH_CHECK_INTERVAL)
if convErr != nil {
healthCheckInterval = 5
}
t := time.NewTicker(time.Second * time.Duration(healthCheckInterval))
for {
select {
case <-t.C: