Merge branch 'main' into modify-label-table

pull/313/head
Neeraj Kashyap 2021-10-20 09:36:05 -07:00
commit 6b5b6049b5
41 changed files with 5718 additions and 787 deletions

View file

@@ -3,28 +3,71 @@ The Moonstream HTTP API
"""
import logging
import time
from typing import Dict
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from . import actions
from . import data
from .middleware import MoonstreamHTTPException
from .routes.address_info import app as addressinfo_api
from .routes.nft import app as nft_api
from .routes.whales import app as whales_api
from .routes.subscriptions import app as subscriptions_api
from .routes.streams import app as streams_api
from .routes.txinfo import app as txinfo_api
from .routes.users import app as users_api
from .settings import ORIGINS
from .routes.address_info import router as addressinfo_router
from .routes.nft import router as nft_router
from .routes.streams import router as streams_router
from .routes.subscriptions import router as subscriptions_router
from .routes.txinfo import router as txinfo_router
from .routes.users import router as users_router
from .routes.whales import router as whales_router
from .middleware import BroodAuthMiddleware, MoonstreamHTTPException
from .settings import DOCS_TARGET_PATH, ORIGINS
from .version import MOONSTREAM_VERSION
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
app = FastAPI(openapi_url=None)
tags_metadata = [
{"name": "addressinfo", "description": "Blockchain addresses public information."},
{
"name": "labels",
"description": "Labels for transactions, addresses with additional information.",
},
{"name": "nft", "description": "NFT market summaries."},
{"name": "streams", "description": "Operations with data streams and filters."},
{"name": "subscriptions", "description": "Operations with user subscriptions."},
{"name": "time", "description": "Server timestamp endpoints."},
{"name": "tokens", "description": "Operations with user tokens."},
{"name": "txinfo", "description": "Ethereum transactions info."},
{"name": "users", "description": "Operations with users."},
{"name": "whales", "description": "Whales summaries"},
]
app = FastAPI(
title=f"Moonstream API",
description="Moonstream API endpoints.",
version=MOONSTREAM_VERSION,
openapi_tags=tags_metadata,
openapi_url="/openapi.json",
docs_url=None,
redoc_url=f"/{DOCS_TARGET_PATH}",
)
whitelist_paths: Dict[str, str] = {}
whitelist_paths.update(
{
"/ping": "GET",
"/version": "GET",
"/now": "GET",
"/docs": "GET",
"/openapi.json": "GET",
"/streams/info": "GET",
"/subscriptions/types": "GET",
"/users": "POST",
"/users/token": "POST",
"/users/password/reset_initiate": "POST",
"/users/password/reset_complete": "POST",
}
)
app.add_middleware(BroodAuthMiddleware, whitelist=whitelist_paths)
app.add_middleware(
CORSMiddleware,
allow_origins=ORIGINS,
@@ -36,23 +79,32 @@ app.add_middleware(
@app.get("/ping", response_model=data.PingResponse)
async def ping_handler() -> data.PingResponse:
"""
Check server status.
"""
return data.PingResponse(status="ok")
@app.get("/version", response_model=data.VersionResponse)
async def version_handler() -> data.VersionResponse:
"""
Get server version.
"""
return data.VersionResponse(version=MOONSTREAM_VERSION)
@app.get("/now", tags=["time"])
async def now_handler() -> data.NowResponse:
"""
Get current server time.
"""
return data.NowResponse(epoch_time=time.time())
@app.get("/status", response_model=data.StatusResponse)
async def status_handler() -> data.StatusResponse:
"""
Get latest records and their creation timestamp for crawlers:
Find latest crawler records with creation timestamps:
- ethereum_txpool
- ethereum_trending
"""
@@ -70,10 +122,10 @@ async def status_handler() -> data.StatusResponse:
)
app.mount("/subscriptions", subscriptions_api)
app.mount("/users", users_api)
app.mount("/streams", streams_api)
app.mount("/txinfo", txinfo_api)
app.mount("/address_info", addressinfo_api)
app.mount("/nft", nft_api)
app.mount("/whales", whales_api)
app.include_router(addressinfo_router)
app.include_router(nft_router)
app.include_router(streams_router)
app.include_router(subscriptions_router)
app.include_router(txinfo_router)
app.include_router(users_router)
app.include_router(whales_router)
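The whitelist above maps public paths to the HTTP methods allowed without a token, so BroodAuthMiddleware can skip authentication for them. A minimal sketch of how such a whitelist-driven middleware might work (the dispatch logic here is an assumption for illustration; only the whitelist shape and the BaseHTTPMiddleware base class appear in this commit):

from typing import Awaitable, Callable, Dict

from fastapi import Request, Response
from starlette.middleware.base import BaseHTTPMiddleware


class WhitelistAuthMiddleware(BaseHTTPMiddleware):
    # Hypothetical stand-in for BroodAuthMiddleware.
    def __init__(self, app, whitelist: Dict[str, str]):
        super().__init__(app)
        self.whitelist = whitelist

    async def dispatch(
        self, request: Request, call_next: Callable[[Request], Awaitable[Response]]
    ) -> Response:
        # Whitelisted path/method pairs pass through with no token check.
        if self.whitelist.get(request.url.path) == request.method:
            return await call_next(request)
        token = request.headers.get("Authorization")
        if token is None:
            return Response(status_code=403, content="Missing authorization header")
        # A real implementation would verify the token against the auth server
        # and attach the resolved user to request.state before continuing.
        request.state.token = token
        return await call_next(request)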

View file

@@ -4,7 +4,6 @@ from typing import Any, Awaitable, Callable, Dict, Optional
from bugout.data import BugoutUser
from bugout.exceptions import BugoutResponseException
from fastapi import HTTPException, Request, Response
from starlette.background import BackgroundTask
from starlette.middleware.base import BaseHTTPMiddleware
from .reporter import reporter

View file

@@ -1,50 +1,22 @@
import logging
from typing import Dict, List, Optional
from typing import Optional
from sqlalchemy.sql.expression import true
from fastapi import FastAPI, Depends, Query
from fastapi.middleware.cors import CORSMiddleware
from fastapi import APIRouter, Depends, Query
from moonstreamdb.db import yield_db_session
from sqlalchemy.orm import Session
from .. import actions
from .. import data
from ..middleware import BroodAuthMiddleware, MoonstreamHTTPException
from ..settings import DOCS_TARGET_PATH, ORIGINS, DOCS_PATHS
from ..version import MOONSTREAM_VERSION
from ..middleware import MoonstreamHTTPException
logger = logging.getLogger(__name__)
tags_metadata = [
{"name": "addressinfo", "description": "Address public information."},
{"name": "labels", "description": "Addresses label information."},
]
app = FastAPI(
title=f"Moonstream users API.",
description="User, token and password handlers.",
version=MOONSTREAM_VERSION,
openapi_tags=tags_metadata,
openapi_url="/openapi.json",
docs_url=None,
redoc_url=f"/{DOCS_TARGET_PATH}",
router = APIRouter(
prefix="/address_info",
)
app.add_middleware(
CORSMiddleware,
allow_origins=ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
whitelist_paths: Dict[str, str] = {}
whitelist_paths.update(DOCS_PATHS)
app.add_middleware(BroodAuthMiddleware, whitelist=whitelist_paths)
@app.get(
@router.get(
"/ethereum_blockchain",
tags=["addressinfo"],
response_model=data.EthereumAddressInfo,
@@ -61,9 +33,9 @@ async def addressinfo_handler(
return response
@app.get(
@router.get(
"/labels/ethereum_blockchain",
tags=["labels bul"],
tags=["labels"],
response_model=data.AddressListLabelsResponse,
)
async def addresses_labels_bulk_handler(

View file

@@ -3,55 +3,29 @@ Moonstream's /nft endpoints.
These endpoints provide public access to NFT market summaries. No authentication required.
"""
from datetime import datetime
import logging
from typing import Optional
from bugout.data import BugoutResource
from fastapi import Depends, FastAPI, Query
from moonstreamdb import db
from fastapi import APIRouter, Depends, Query
from fastapi.middleware.cors import CORSMiddleware
from moonstreamdb import db
from sqlalchemy.orm import Session
from .. import data
from ..providers.bugout import nft_summary_provider
from ..settings import (
bugout_client,
DOCS_TARGET_PATH,
MOONSTREAM_ADMIN_ACCESS_TOKEN,
MOONSTREAM_DATA_JOURNAL_ID,
ORIGINS,
)
from ..stream_queries import StreamQuery
from ..version import MOONSTREAM_VERSION
logger = logging.getLogger(__name__)
tags_metadata = [
{"name": "nft", "description": "NFT market summaries"},
]
app = FastAPI(
title=f"Moonstream /nft API",
description="User, token and password handlers.",
version=MOONSTREAM_VERSION,
openapi_tags=tags_metadata,
openapi_url="/openapi.json",
docs_url=None,
redoc_url=f"/{DOCS_TARGET_PATH}",
)
app.add_middleware(
CORSMiddleware,
allow_origins=ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
router = APIRouter(prefix="/nft")
@app.get("/", tags=["streams"], response_model=data.GetEventsResponse)
@router.get("/", tags=["streams"], response_model=data.GetEventsResponse)
async def stream_handler(
start_time: int = Query(0),
end_time: Optional[int] = Query(None),

View file

@@ -5,14 +5,12 @@ import logging
from typing import Any, Dict, List, Optional
from bugout.data import BugoutResource
from fastapi import FastAPI, Request, Query, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi import APIRouter, Request, Query, Depends
from moonstreamdb import db
from sqlalchemy.orm import Session
from .. import data
from ..middleware import BroodAuthMiddleware, MoonstreamHTTPException
from ..middleware import MoonstreamHTTPException
from ..providers import (
ReceivingEventsException,
event_providers,
@@ -22,47 +20,20 @@ from ..providers import (
previous_event,
)
from ..settings import (
DOCS_TARGET_PATH,
MOONSTREAM_ADMIN_ACCESS_TOKEN,
MOONSTREAM_DATA_JOURNAL_ID,
ORIGINS,
DOCS_PATHS,
bugout_client as bc,
BUGOUT_REQUEST_TIMEOUT_SECONDS,
)
from .. import stream_queries
from .subscriptions import BUGOUT_RESOURCE_TYPE_SUBSCRIPTION
from ..version import MOONSTREAM_VERSION
logger = logging.getLogger(__name__)
tags_metadata = [
{"name": "streams", "description": "Operations with data stream and filters."},
]
app = FastAPI(
title=f"Moonstream streams API.",
description="Streams endpoints.",
version=MOONSTREAM_VERSION,
openapi_tags=tags_metadata,
openapi_url="/openapi.json",
docs_url=None,
redoc_url=f"/{DOCS_TARGET_PATH}",
router = APIRouter(
prefix="/streams",
)
app.add_middleware(
CORSMiddleware,
allow_origins=ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
whitelist_paths: Dict[str, str] = {"/streams/info": "GET"}
whitelist_paths.update(DOCS_PATHS)
whitelist_paths.update()
app.add_middleware(BroodAuthMiddleware, whitelist=whitelist_paths)
def get_user_subscriptions(token: str) -> Dict[str, List[BugoutResource]]:
"""
@@ -89,7 +60,7 @@ def get_user_subscriptions(token: str) -> Dict[str, List[BugoutResource]]:
return user_subscriptions
@app.get("/info", tags=["streams"])
@router.get("/info", tags=["streams"])
async def info_handler() -> Dict[str, Any]:
info = {
event_type: {
@@ -102,7 +73,7 @@ async def info_handler() -> Dict[str, Any]:
return info
@app.get("/", tags=["streams"], response_model=data.GetEventsResponse)
@router.get("/", tags=["streams"], response_model=data.GetEventsResponse)
async def stream_handler(
request: Request,
q: str = Query(""),
@@ -159,7 +130,7 @@ async def stream_handler(
return response
@app.get("/latest", tags=["streams"])
@router.get("/latest", tags=["streams"])
async def latest_events_handler(
request: Request, q=Query(""), db_session: Session = Depends(db.yield_db_session)
) -> List[data.Event]:
@@ -201,7 +172,7 @@ async def latest_events_handler(
return events
@app.get("/next", tags=["stream"])
@router.get("/next", tags=["stream"])
async def next_event_handler(
request: Request,
q: str = Query(""),
@@ -256,7 +227,7 @@ async def next_event_handler(
return event
@app.get("/previous", tags=["stream"])
@router.get("/previous", tags=["stream"])
async def previous_event_handler(
request: Request,
q: str = Query(""),

View file

@@ -2,60 +2,31 @@
The Moonstream subscriptions HTTP API
"""
import logging
from typing import Dict, List, Optional
from typing import List, Optional
from bugout.data import BugoutResource, BugoutResources
from bugout.exceptions import BugoutResponseException
from fastapi import FastAPI, Request, Form
from fastapi.middleware.cors import CORSMiddleware
from fastapi import APIRouter, Request, Form
from ..admin import subscription_types
from .. import data
from ..middleware import BroodAuthMiddleware, MoonstreamHTTPException
from ..middleware import MoonstreamHTTPException
from ..reporter import reporter
from ..settings import (
DOCS_TARGET_PATH,
DOCS_PATHS,
MOONSTREAM_APPLICATION_ID,
ORIGINS,
bugout_client as bc,
)
from ..version import MOONSTREAM_VERSION
logger = logging.getLogger(__name__)
tags_metadata = [
{"name": "subscriptions", "description": "Operations with subscriptions."},
]
app = FastAPI(
title=f"Moonstream subscriptions API.",
description="User subscriptions endpoints.",
version=MOONSTREAM_VERSION,
openapi_tags=tags_metadata,
openapi_url="/openapi.json",
docs_url=None,
redoc_url=f"/{DOCS_TARGET_PATH}",
router = APIRouter(
prefix="/subscriptions",
)
app.add_middleware(
CORSMiddleware,
allow_origins=ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
whitelist_paths: Dict[str, str] = {}
whitelist_paths.update(DOCS_PATHS)
whitelist_paths.update({"/subscriptions/types": "GET"})
app.add_middleware(BroodAuthMiddleware, whitelist=whitelist_paths)
BUGOUT_RESOURCE_TYPE_SUBSCRIPTION = "subscription"
@app.post("/", tags=["subscriptions"], response_model=data.SubscriptionResourceData)
@router.post("/", tags=["subscriptions"], response_model=data.SubscriptionResourceData)
async def add_subscription_handler(
request: Request, # subscription_data: data.CreateSubscriptionRequest = Body(...)
address: str = Form(...),
@@ -118,7 +89,7 @@ async def add_subscription_handler(
)
@app.delete(
@router.delete(
"/{subscription_id}",
tags=["subscriptions"],
response_model=data.SubscriptionResourceData,
@@ -148,7 +119,7 @@ async def delete_subscription_handler(request: Request, subscription_id: str):
)
@app.get("/", tags=["subscriptions"], response_model=data.SubscriptionsListResponse)
@router.get("/", tags=["subscriptions"], response_model=data.SubscriptionsListResponse)
async def get_subscriptions_handler(request: Request) -> data.SubscriptionsListResponse:
"""
Get user's subscriptions.
@@ -186,7 +157,7 @@ async def get_subscriptions_handler(request: Request) -> data.SubscriptionsListR
)
@app.put(
@router.put(
"/{subscription_id}",
tags=["subscriptions"],
response_model=data.SubscriptionResourceData,
@@ -236,7 +207,7 @@ async def update_subscriptions_handler(
)
@app.get(
@router.get(
"/types", tags=["subscriptions"], response_model=data.SubscriptionTypesListResponse
)
async def list_subscription_types() -> data.SubscriptionTypesListResponse:

View file

@@ -6,54 +6,24 @@ transactions, etc.) with side information and return objects that are better sui
end users.
"""
import logging
from typing import Dict, Optional
from typing import Optional
from fastapi import FastAPI, Depends
from fastapi.middleware.cors import CORSMiddleware
from fastapi import APIRouter, Depends
from moonstreamdb.db import yield_db_session
from moonstreamdb.models import EthereumAddress
from sqlalchemy.orm import Session
from ..abi_decoder import decode_abi
from .. import actions
from .. import data
from ..middleware import BroodAuthMiddleware
from ..settings import DOCS_TARGET_PATH, ORIGINS, DOCS_PATHS
from ..version import MOONSTREAM_VERSION
logger = logging.getLogger(__name__)
tags_metadata = [
{"name": "txinfo", "description": "Ethereum transactions info."},
]
app = FastAPI(
title=f"Moonstream /txinfo API.",
description="User, token and password handlers.",
version=MOONSTREAM_VERSION,
openapi_tags=tags_metadata,
openapi_url="/openapi.json",
docs_url=None,
redoc_url=f"/{DOCS_TARGET_PATH}",
)
app.add_middleware(
CORSMiddleware,
allow_origins=ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
whitelist_paths: Dict[str, str] = {}
whitelist_paths.update(DOCS_PATHS)
app.add_middleware(BroodAuthMiddleware, whitelist=whitelist_paths)
router = APIRouter(prefix="/txinfo")
# TODO(zomglings): Factor out the enrichment logic into a separate action, because it may be useful
# independently from serving API calls (e.g. data processing).
# TODO(kompotkot): Re-organize function to be able handle each steps with exceptions.
@app.post(
@router.post(
"/ethereum_blockchain",
tags=["txinfo"],
response_model=data.TxinfoEthereumBlockchainResponse,

View file

@@ -8,66 +8,28 @@ import uuid
from bugout.data import BugoutToken, BugoutUser, BugoutResource, BugoutUserTokens
from bugout.exceptions import BugoutResponseException
from fastapi import (
APIRouter,
Body,
FastAPI,
Form,
Request,
)
from fastapi.middleware.cors import CORSMiddleware
from .. import data
from ..middleware import BroodAuthMiddleware, MoonstreamHTTPException
from ..middleware import MoonstreamHTTPException
from ..settings import (
MOONSTREAM_APPLICATION_ID,
DOCS_TARGET_PATH,
ORIGINS,
DOCS_PATHS,
bugout_client as bc,
BUGOUT_REQUEST_TIMEOUT_SECONDS,
)
from ..version import MOONSTREAM_VERSION
from ..actions import create_onboarding_resource
logger = logging.getLogger(__name__)
tags_metadata = [
{"name": "users", "description": "Operations with users."},
{"name": "tokens", "description": "Operations with user tokens."},
]
app = FastAPI(
title=f"Moonstream users API.",
description="User, token and password handlers.",
version=MOONSTREAM_VERSION,
openapi_tags=tags_metadata,
openapi_url="/openapi.json",
docs_url=None,
redoc_url=f"/{DOCS_TARGET_PATH}",
)
app.add_middleware(
CORSMiddleware,
allow_origins=ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
whitelist_paths: Dict[str, str] = {}
whitelist_paths.update(DOCS_PATHS)
whitelist_paths.update(
{
"/users": "POST",
"/users/token": "POST",
"/users/password/reset_initiate": "POST",
"/users/password/reset_complete": "POST",
}
)
app.add_middleware(BroodAuthMiddleware, whitelist=whitelist_paths)
router = APIRouter(prefix="/users")
@app.post("/", tags=["users"], response_model=BugoutUser)
@router.post("/", tags=["users"], response_model=BugoutUser)
async def create_user_handler(
username: str = Form(...), email: str = Form(...), password: str = Form(...)
) -> BugoutUser:
@@ -85,13 +47,13 @@ async def create_user_handler(
return user
@app.get("/", tags=["users"], response_model=BugoutUser)
@router.get("/", tags=["users"], response_model=BugoutUser)
async def get_user_handler(request: Request) -> BugoutUser:
user: BugoutUser = request.state.user
return user
@app.post("/password/reset_initiate", tags=["users"], response_model=Dict[str, Any])
@router.post("/password/reset_initiate", tags=["users"], response_model=Dict[str, Any])
async def restore_password_handler(email: str = Form(...)) -> Dict[str, Any]:
try:
response = bc.restore_password(email=email)
@@ -102,7 +64,7 @@ async def restore_password_handler(email: str = Form(...)) -> Dict[str, Any]:
return response
@app.post("/password/reset_complete", tags=["users"], response_model=BugoutUser)
@router.post("/password/reset_complete", tags=["users"], response_model=BugoutUser)
async def reset_password_handler(
reset_id: str = Form(...), new_password: str = Form(...)
) -> BugoutUser:
@@ -115,7 +77,7 @@ async def reset_password_handler(
return response
@app.post("/password/change", tags=["users"], response_model=BugoutUser)
@router.post("/password/change", tags=["users"], response_model=BugoutUser)
async def change_password_handler(
request: Request, current_password: str = Form(...), new_password: str = Form(...)
) -> BugoutUser:
@@ -131,7 +93,7 @@ async def change_password_handler(
return user
@app.delete("/", tags=["users"], response_model=BugoutUser)
@router.delete("/", tags=["users"], response_model=BugoutUser)
async def delete_user_handler(
request: Request, password: str = Form(...)
) -> BugoutUser:
@@ -146,7 +108,7 @@ async def delete_user_handler(
return user
@app.post("/token", tags=["tokens"], response_model=BugoutToken)
@router.post("/token", tags=["tokens"], response_model=BugoutToken)
async def login_handler(
username: str = Form(...),
password: str = Form(...),
@@ -167,7 +129,7 @@ async def login_handler(
return token
@app.get("/tokens", tags=["tokens"], response_model=BugoutUserTokens)
@router.get("/tokens", tags=["tokens"], response_model=BugoutUserTokens)
async def tokens_handler(request: Request) -> BugoutUserTokens:
token = request.state.token
try:
@@ -181,9 +143,9 @@ async def tokens_handler(request: Request) -> BugoutUserTokens:
return response
@app.put("/token", tags=["tokens"], response_model=BugoutToken)
@router.put("/token", tags=["tokens"], response_model=BugoutToken)
async def token_update_handler(
request: Request, token_note: str = Form(...), access_token: str = Form(...)
token_note: str = Form(...), access_token: str = Form(...)
) -> BugoutToken:
try:
response = bc.update_token(token=access_token, token_note=token_note)
@@ -194,7 +156,7 @@ async def token_update_handler(
return response
@app.post("/revoke/{access_token}", tags=["tokens"], response_model=uuid.UUID)
@router.post("/revoke/{access_token}", tags=["tokens"], response_model=uuid.UUID)
async def delete_token_by_id_handler(
request: Request, access_token: uuid.UUID
) -> uuid.UUID:
@@ -212,7 +174,7 @@ async def delete_token_by_id_handler(
return response
@app.delete("/token", tags=["tokens"], response_model=uuid.UUID)
@router.delete("/token", tags=["tokens"], response_model=uuid.UUID)
async def logout_handler(request: Request) -> uuid.UUID:
token = request.state.token
try:
@@ -224,7 +186,7 @@ async def logout_handler(request: Request) -> uuid.UUID:
return token_id
@app.post("/onboarding", tags=["users"], response_model=data.OnboardingState)
@router.post("/onboarding", tags=["users"], response_model=data.OnboardingState)
async def set_onboarding_state(
request: Request,
onboarding_data: data.OnboardingState = Body(...),
@@ -270,7 +232,7 @@ async def set_onboarding_state(
return result
@app.get("/onboarding", tags=["users"], response_model=data.OnboardingState)
@router.get("/onboarding", tags=["users"], response_model=data.OnboardingState)
async def get_onboarding_state(request: Request) -> data.OnboardingState:
token = request.state.token
try:
@@ -305,7 +267,7 @@ async def get_onboarding_state(request: Request) -> data.OnboardingState:
return result
@app.delete("/onboarding", tags=["users"], response_model=data.OnboardingState)
@router.delete("/onboarding", tags=["users"], response_model=data.OnboardingState)
async def delete_onboarding_state(request: Request) -> data.OnboardingState:
token = request.state.token
try:

View file

@@ -3,55 +3,28 @@ Moonstream's /whales endpoints.
These endpoints provide public access to whale watch summaries. No authentication required.
"""
from datetime import datetime
import logging
from typing import Optional
from bugout.data import BugoutResource
from fastapi import Depends, FastAPI, Query
from fastapi import APIRouter, Depends, Query
from moonstreamdb import db
from fastapi.middleware.cors import CORSMiddleware
from sqlalchemy.orm import Session
from .. import data
from ..providers.bugout import whalewatch_provider
from ..settings import (
bugout_client,
DOCS_TARGET_PATH,
MOONSTREAM_ADMIN_ACCESS_TOKEN,
MOONSTREAM_DATA_JOURNAL_ID,
ORIGINS,
)
from ..stream_queries import StreamQuery
from ..version import MOONSTREAM_VERSION
logger = logging.getLogger(__name__)
tags_metadata = [
{"name": "whales", "description": "Whales summaries"},
]
app = FastAPI(
title=f"Moonstream /whales API",
description="User, token and password handlers.",
version=MOONSTREAM_VERSION,
openapi_tags=tags_metadata,
openapi_url="/openapi.json",
docs_url=None,
redoc_url=f"/{DOCS_TARGET_PATH}",
)
app.add_middleware(
CORSMiddleware,
allow_origins=ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
router = APIRouter(prefix="/whales")
@app.get("/", tags=["whales"], response_model=data.GetEventsResponse)
@router.get("/", tags=["whales"], response_model=data.GetEventsResponse)
async def stream_handler(
start_time: int = Query(0),
end_time: Optional[int] = Query(None),

View file

@@ -34,15 +34,6 @@ ORIGINS = RAW_ORIGINS.split(",")
# OpenAPI
DOCS_TARGET_PATH = "docs"
MOONSTREAM_OPENAPI_LIST = []
MOONSTREAM_OPENAPI_LIST_RAW = os.environ.get("MOONSTREAM_OPENAPI_LIST")
if MOONSTREAM_OPENAPI_LIST_RAW is not None:
MOONSTREAM_OPENAPI_LIST = MOONSTREAM_OPENAPI_LIST_RAW.split(",")
DOCS_PATHS = {}
for path in MOONSTREAM_OPENAPI_LIST:
DOCS_PATHS[f"/{path}/{DOCS_TARGET_PATH}"] = "GET"
DOCS_PATHS[f"/{path}/{DOCS_TARGET_PATH}/openapi.json"] = "GET"
DEFAULT_STREAM_TIMEINTERVAL = 5 * 60

View file

@@ -1,5 +1,4 @@
export MOONSTREAM_CORS_ALLOWED_ORIGINS="http://localhost:3000,https://moonstream.to,https://www.moonstream.to"
export MOONSTREAM_OPENAPI_LIST="users,subscriptions,txinfo"
export MOONSTREAM_APPLICATION_ID="<issued_bugout_application_id>"
export MOONSTREAM_DATA_JOURNAL_ID="<bugout_journal_id_to_store_blockchain_data>"
export MOONSTREAM_DB_URI="postgresql://<username>:<password>@<db_host>:<db_port>/<db_name>"

View file

@@ -14,7 +14,6 @@ PARAMETERS_ENV_PATH="${SECRETS_DIR}/app.env"
AWS_SSM_PARAMETER_PATH="${AWS_SSM_PARAMETER_PATH:-/moonstream/prod}"
SCRIPT_DIR="$(realpath $(dirname $0))"
PARAMETERS_SCRIPT="${SCRIPT_DIR}/parameters.py"
ETHEREUM_GETH_SERVICE="ethereum-node.service"
ETHEREUM_SYNCHRONIZE_SERVICE="ethereum-synchronize.service"
ETHEREUM_TRENDING_SERVICE="ethereum-trending.service"
ETHEREUM_TRENDING_TIMER="ethereum-trending.timer"
@@ -51,22 +50,6 @@ echo "Retrieving deployment parameters"
mkdir -p "${SECRETS_DIR}"
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION}" "${PYTHON}" "${PARAMETERS_SCRIPT}" extract -p "${AWS_SSM_PARAMETER_PATH}" -o "${PARAMETERS_ENV_PATH}"
echo
echo
echo "Deploy Geth service if not running already"
if systemctl is-active --quiet "${ETHEREUM_GETH_SERVICE}"
then
echo "Ethereum Geth service ${ETHEREUM_GETH_SERVICE} already running"
else
echo "Replacing Ethereum Geth service definition with ${ETHEREUM_GETH_SERVICE}"
chmod 644 "${SCRIPT_DIR}/${ETHEREUM_GETH_SERVICE}"
cp "${SCRIPT_DIR}/${ETHEREUM_GETH_SERVICE}" "/etc/systemd/system/${ETHEREUM_GETH_SERVICE}"
systemctl daemon-reload
systemctl disable "${ETHEREUM_GETH_SERVICE}"
systemctl restart "${ETHEREUM_GETH_SERVICE}"
sleep 10
fi
echo
echo
echo "Replacing existing Ethereum block with transactions syncronizer service definition with ${ETHEREUM_SYNCHRONIZE_SERVICE}"

View file

@@ -1,16 +0,0 @@
[Unit]
Description=Ethereum node Geth client
After=network.target
[Service]
User=ubuntu
Group=www-data
ExecStart=/usr/bin/geth --syncmode full \
--port 41381 --datadir /mnt/disks/nodes/ethereum \
--txpool.globalslots 450000 --txpool.globalqueue 50000 \
--http --http.port 18375 --http.api eth,web3,txpool
ExecStop=/bin/kill -s SIGINT -$MAINPID
SyslogIdentifier=ethereum-node
[Install]
WantedBy=multi-user.target

View file

@@ -97,20 +97,6 @@ def ethcrawler_blocks_sync_handler(args: argparse.Namespace) -> None:
if latest_stored_block_number is None:
latest_stored_block_number = 0
block_number_difference = latest_block_number - 1 - latest_stored_block_number
if args.start is None:
if block_number_difference < args.confirmations:
logger.info(
f"Synchronization is unnecessary for blocks {latest_stored_block_number}-{latest_block_number - 1}"
)
time.sleep(5)
continue
else:
bottom_block_number = latest_block_number - args.confirmations
else:
bottom_block_number = max(latest_stored_block_number + 1, args.start)
if latest_stored_block_number >= latest_block_number:
logger.info(
f"Synchronization is unnecessary for blocks {latest_stored_block_number}-{latest_block_number - 1}"
@@ -118,6 +104,25 @@ def ethcrawler_blocks_sync_handler(args: argparse.Namespace) -> None:
time.sleep(5)
continue
block_number_difference = latest_block_number - 1 - latest_stored_block_number
if block_number_difference >= 70:
logger.warning(
f"Block difference is too large: {block_number_difference}, crawling {args.confirmations + 1} latest blocks"
)
bottom_block_number = latest_block_number - args.confirmations - 1
else:
if args.start is None:
if block_number_difference < args.confirmations:
logger.info(
f"Synchronization is unnecessary for blocks {latest_stored_block_number}-{latest_block_number - 1}"
)
time.sleep(5)
continue
else:
bottom_block_number = latest_stored_block_number + 1
else:
bottom_block_number = max(latest_stored_block_number + 1, args.start)
for blocks_numbers_list in yield_blocks_numbers_lists(
f"{bottom_block_number}-{latest_block_number}",
order=args.order,
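To make the branching above easier to follow, here is the same bottom_block_number selection pulled out into a standalone function; the threshold of 70 and the confirmations arithmetic come directly from the code above, while the function itself and the sample numbers are illustrative:

from typing import Optional


def pick_bottom_block(
    latest_block_number: int,
    latest_stored_block_number: int,
    confirmations: int,
    start: Optional[int] = None,
) -> Optional[int]:
    # Returns None when synchronization is unnecessary for now.
    block_number_difference = latest_block_number - 1 - latest_stored_block_number
    if block_number_difference >= 70:
        # Too far behind: crawl only the confirmations + 1 latest blocks.
        return latest_block_number - confirmations - 1
    if start is None:
        if block_number_difference < confirmations:
            return None  # wait and retry on the next loop iteration
        return latest_stored_block_number + 1
    return max(latest_stored_block_number + 1, start)


# Hypothetical numbers: stored block 1000, chain head 1200, 10 confirmations.
# The difference (199) exceeds 70, so crawling starts at 1200 - 10 - 1 = 1189.
assert pick_bottom_block(1200, 1000, 10) == 1189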

datasets/nfts/.gitignore vendored 100644 (+167 lines)
View file

@@ -0,0 +1,167 @@
# Created by https://www.toptal.com/developers/gitignore/api/python,visualstudiocode
# Edit at https://www.toptal.com/developers/gitignore?templates=python,visualstudiocode
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
### VisualStudioCode ###
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
*.code-workspace
# Local History for Visual Studio Code
.history/
### VisualStudioCode Patch ###
# Ignore all local history of files
.history
.ionide
# End of https://www.toptal.com/developers/gitignore/api/python,visualstudiocode
.venv/
.nfts/
venv/
.secrets/
.analysis/

View file

@@ -0,0 +1,7 @@
# The Moonstream NFTs dataset
This directory contains all the code needed to construct the Moonstream NFTs dataset. These scripts
may require access to:
1. The Moonstream database
2. Moonstream Bugout data stores
3. A web3 provider
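A minimal preflight sketch of the environment such a run might expect; the variable names MOONSTREAM_DB_URI and MOONSTREAM_WEB3_PROVIDER appear elsewhere in this commit, but exactly which scripts read which variables is an assumption:

import os

# Hypothetical check before running the dataset scripts.
required_vars = ["MOONSTREAM_DB_URI", "MOONSTREAM_WEB3_PROVIDER"]
missing = [var for var in required_vars if not os.environ.get(var)]
if missing:
    raise ValueError(f"Missing environment variables: {', '.join(missing)}")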

View file

@@ -0,0 +1,319 @@
import argparse
import contextlib
import logging
import os
import sqlite3
from shutil import copyfile
from typing import Optional
from moonstreamdb.db import yield_db_session_ctx
from .enrich import EthereumBatchloader, enrich
from .data import EventType, event_types, nft_event, BlockBounds
from .datastore import setup_database, import_data, filter_data
from .derive import (
current_owners,
current_market_values,
current_values_distribution,
transfer_statistics_by_address,
quantile_generating,
mint_holding_times,
ownership_transitions,
transfer_holding_times,
transfers_mints_connection_table,
)
from .materialize import create_dataset
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
derive_functions = {
"current_owners": current_owners,
"current_market_values": current_market_values,
"current_values_distribution": current_values_distribution,
"mint_holding_times": mint_holding_times,
"ownership_transitions": ownership_transitions,
"quantile_generating": quantile_generating,
"transfer_holding_times": transfer_holding_times,
"transfers_mints_connection_table": transfers_mints_connection_table,
"transfer_statistics_by_address": transfer_statistics_by_address,
}
def handle_initdb(args: argparse.Namespace) -> None:
with contextlib.closing(sqlite3.connect(args.datastore)) as conn:
setup_database(conn)
def handle_import_data(args: argparse.Namespace) -> None:
event_type = nft_event(args.type)
with contextlib.closing(
sqlite3.connect(args.target)
) as target_conn, contextlib.closing(sqlite3.connect(args.source)) as source_conn:
import_data(target_conn, source_conn, event_type, args.batch_size)
def handle_filter_data(args: argparse.Namespace) -> None:
with contextlib.closing(sqlite3.connect(args.source)) as source_conn:
if args.target == args.source and args.source is not None:
sqlite_path = f"{args.target}.dump"
else:
sqlite_path = args.target
print(f"Creating new database:{sqlite_path}")
copyfile(args.source, sqlite_path)
# do connection
with contextlib.closing(sqlite3.connect(sqlite_path)) as source_conn:
print("Start filtering")
filter_data(
source_conn,
start_time=args.start_time,
end_time=args.end_time,
)
print("Filtering end.")
for index, function_name in enumerate(derive_functions.keys()):
print(
f"Derive process {function_name} {index+1}/{len(derive_functions.keys())}"
)
derive_functions[function_name](source_conn)
# Apply derive to new data
def handle_materialize(args: argparse.Namespace) -> None:
event_type = nft_event(args.type)
bounds: Optional[BlockBounds] = None
if args.start is not None:
bounds = BlockBounds(starting_block=args.start, ending_block=args.end)
elif args.end is not None:
raise ValueError("You cannot set --end unless you also set --start")
batch_loader = EthereumBatchloader(jsonrpc_url=args.jsonrpc)
logger.info(f"Materializing NFT events to datastore: {args.datastore}")
logger.info(f"Block bounds: {bounds}")
with yield_db_session_ctx() as db_session, contextlib.closing(
sqlite3.connect(args.datastore)
) as moonstream_datastore:
create_dataset(
moonstream_datastore,
db_session,
event_type,
bounds,
args.batch_size,
)
def handle_enrich(args: argparse.Namespace) -> None:
batch_loader = EthereumBatchloader(jsonrpc_url=args.jsonrpc)
logger.info(f"Enriching NFT events in datastore: {args.datastore}")
with contextlib.closing(sqlite3.connect(args.datastore)) as moonstream_datastore:
enrich(
moonstream_datastore,
EventType.TRANSFER,
batch_loader,
args.batch_size,
)
enrich(
moonstream_datastore,
EventType.MINT,
batch_loader,
args.batch_size,
)
def handle_derive(args: argparse.Namespace) -> None:
with contextlib.closing(sqlite3.connect(args.datastore)) as moonstream_datastore:
calling_functions = []
if not args.derive_functions:
calling_functions.extend(derive_functions.keys())
else:
calling_functions.extend(args.derive_functions)
for function_name in calling_functions:
if function_name in derive_functions:
derive_functions[function_name](moonstream_datastore)
logger.info("Done!")
def main() -> None:
"""
"nfts" command handler.
When reading this code, to find the definition of any of the "nfts" subcommands, grep for comments
of the form:
# Command: nfts <subcommand>
"""
default_web3_provider = os.environ.get("MOONSTREAM_WEB3_PROVIDER")
if default_web3_provider is not None and not default_web3_provider.startswith(
"http"
):
raise ValueError(
f"Please either unset MOONSTREAM_WEB3_PROVIDER environment variable or set it to an HTTP/HTTPS URL. Current value: {default_web3_provider}"
)
parser = argparse.ArgumentParser(
description="Tools to work with the Moonstream NFTs dataset"
)
subcommands = parser.add_subparsers(title="Subcommands")
# Command: nfts initdb
parser_initdb = subcommands.add_parser(
"initdb",
description="Initialize an SQLite datastore for the Moonstream NFTs dataset",
)
parser_initdb.add_argument("datastore")
parser_initdb.set_defaults(func=handle_initdb)
# Command: nfts materialize
parser_materialize = subcommands.add_parser(
"materialize", description="Create/update the NFTs dataset"
)
parser_materialize.add_argument(
"-d",
"--datastore",
required=True,
help="Path to SQLite database representing the dataset",
)
parser_materialize.add_argument(
"--jsonrpc",
default=default_web3_provider,
type=str,
help=f"Http uri provider to use when collecting data directly from the Ethereum blockchain (default: {default_web3_provider})",
)
parser_materialize.add_argument(
"-t",
"--type",
choices=event_types,
help="Type of event to materialize intermediate data for",
)
parser_materialize.add_argument(
"--start", type=int, default=None, help="Starting block number"
)
parser_materialize.add_argument(
"--end", type=int, default=None, help="Ending block number"
)
parser_materialize.add_argument(
"-n",
"--batch-size",
type=int,
default=1000,
help="Number of events to process per batch",
)
parser_materialize.set_defaults(func=handle_materialize)
parser_derive = subcommands.add_parser(
"derive", description="Create/updated derived data in the dataset"
)
parser_derive.add_argument(
"-d",
"--datastore",
required=True,
help="Path to SQLite database representing the dataset",
)
parser_derive.add_argument(
"-f",
"--derive-functions",
required=False,
nargs="+",
help=f"Functions wich will call from derive module availabel {list(derive_functions.keys())}",
)
parser_derive.set_defaults(func=handle_derive)
parser_import_data = subcommands.add_parser(
"import-data",
description="Import data from another source NFTs dataset datastore. This operation is performed per table, and replaces the existing table in the target datastore.",
)
parser_import_data.add_argument(
"--target",
required=True,
help="Datastore into which you want to import data",
)
parser_import_data.add_argument(
"--source", required=True, help="Datastore from which you want to import data"
)
parser_import_data.add_argument(
"--type",
required=True,
choices=event_types,
help="Type of data you would like to import from source to target",
)
parser_import_data.add_argument(
"-N",
"--batch-size",
type=int,
default=10000,
help="Batch size for database commits into target datastore.",
)
parser_import_data.set_defaults(func=handle_import_data)
# Create dump of filtered data
parser_filtered_copy = subcommands.add_parser(
"filter-data",
description="Create copy of database with applied filters.",
)
parser_filtered_copy.add_argument(
"--target",
required=True,
help="Datastore into which you want to import data",
)
parser_filtered_copy.add_argument(
"--source", required=True, help="Datastore from which you want to import data"
)
parser_filtered_copy.add_argument(
"--start-time",
required=False,
type=int,
help="Start timestamp.",
)
parser_filtered_copy.add_argument(
"--end-time",
required=False,
type=int,
help="End timestamp.",
)
parser_filtered_copy.set_defaults(func=handle_filter_data)
parser_enrich = subcommands.add_parser(
"enrich", description="enrich dataset from geth node"
)
parser_enrich.add_argument(
"-d",
"--datastore",
required=True,
help="Path to SQLite database representing the dataset",
)
parser_enrich.add_argument(
"--jsonrpc",
default=default_web3_provider,
type=str,
help=f"Http uri provider to use when collecting data directly from the Ethereum blockchain (default: {default_web3_provider})",
)
parser_enrich.add_argument(
"-n",
"--batch-size",
type=int,
default=1000,
help="Number of events to process per batch",
)
parser_enrich.set_defaults(func=handle_enrich)
args = parser.parse_args()
args.func(args)
if __name__ == "__main__":
main()

View file

@@ -0,0 +1,50 @@
"""
Data structures used in (and as part of the maintenance of) the Moonstream NFTs dataset
"""
from dataclasses import dataclass
from enum import Enum
from typing import Optional
@dataclass
class BlockBounds:
starting_block: int
ending_block: Optional[int] = None
class EventType(Enum):
TRANSFER = "nft_transfer"
MINT = "nft_mint"
ERC721 = "erc721"
event_types = {event_type.value: event_type for event_type in EventType}
def nft_event(raw_event: str) -> EventType:
try:
return event_types[raw_event]
except KeyError:
raise ValueError(f"Unknown nft event type: {raw_event}")
@dataclass
class NFTEvent:
event_id: str
event_type: EventType
nft_address: str
token_id: str
from_address: str
to_address: str
transaction_hash: str
value: Optional[int] = None
block_number: Optional[int] = None
timestamp: Optional[int] = None
@dataclass
class NFTMetadata:
address: str
name: str
symbol: str
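As a quick illustration of these structures (all values below are made up): nft_event maps a raw CLI string to an EventType, and an NFTEvent leaves its optional fields as None until enrichment fills them in:

# Illustrative only; the import path is hypothetical (assumes this module is nfts/data.py).
from nfts.data import BlockBounds, NFTEvent, nft_event

event = NFTEvent(
    event_id="example-event-id",
    event_type=nft_event("nft_transfer"),  # resolves to EventType.TRANSFER
    nft_address="0xExampleCollection",
    token_id="1",
    from_address="0xExampleSender",
    to_address="0xExampleRecipient",
    transaction_hash="0xExampleTxHash",
)
# value, block_number and timestamp default to None until a node enriches them.
bounds = BlockBounds(starting_block=12000000)  # ending_block defaults to None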

View file

@@ -0,0 +1,228 @@
"""
Functions to access various data in the NFTs dataset.
"""
import sqlite3
from typing import List, Optional, Tuple
import numpy as np
import pandas as pd
import scipy.sparse
from tqdm import tqdm
from .datastore import event_tables, EventType
# TODO(zomglings): Make it so that table names are parametrized by importable variables. The way
# things are now, we have to be very careful if we ever rename a table in our dataset. We should
# also propagate the name change here.
NFTS = "nfts"
MINTS = event_tables[EventType.MINT]
TRANSFERS = event_tables[EventType.TRANSFER]
CURRENT_OWNERS = "current_owners"
CURRENT_MARKET_VALUES = "current_market_values"
TRANSFER_STATISTICS_BY_ADDRESS = "transfer_statistics_by_address"
MINT_HOLDING_TIMES = "mint_holding_times"
TRANSFER_HOLDING_TIMES = "transfer_holding_times"
OWNERSHIP_TRANSITIONS = "ownership_transitions"
AVAILABLE_DATAFRAMES = {
NFTS: """Describes the NFT contracts represented in this dataset, with a name and symbol if they were available at time of crawl.
Columns:
1. address: The Ethereum address of the NFT contract.
2. name: The name of the collection of NFTs that the contract represents.
3. symbol: The symbol of the collection of NFTs that the contract represents.
""",
MINTS: """All token mint events crawled in this dataset.
Columns:
1. event_id: A unique event ID associated with the event.
2. transaction_hash: The hash of the transaction which triggered the event.
3. block_number: The transaction block in which the transaction was mined.
4. nft_address: The address of the NFT collection containing the minted token.
5. token_id: The ID of the token that was minted.
6. from_address: The "from" address for the transfer event. For a mint, this should be the 0 address: 0x0000000000000000000000000000000000000000.
7. to_address: The "to" address for the transfer event. This represents the owner of the freshly minted token.
8. transaction_value: The amount of WEI that was sent with the transaction in which the token was minted.
9. timestamp: The time at which the mint operation was mined into the blockchain (this is the timestamp for the mined block).
""",
TRANSFERS: """All token transfer events crawled in this dataset.
Columns:
1. event_id: A unique event ID associated with the event.
2. transaction_hash: The hash of the transaction which triggered the event.
3. block_number: The transaction block in which the transaction was mined.
4. nft_address: The address of the NFT collection containing the transferred token.
5. token_id: The ID of the token that was transferred.
6. from_address: The "from" address for the transfer event. This is the address that owned the token at the *start* of the transfer.
7. to_address: The "to" address for the transfer event. This is the address that owned the token at the *end* of the transfer.
8. transaction_value: The amount of WEI that was sent with the transaction in which the token was transferred.
9. timestamp: The time at which the transfer operation was mined into the blockchain (this is the timestamp for the mined block).
""",
CURRENT_OWNERS: f"""This table is derived from the {NFTS}, {MINTS}, and {TRANSFERS} tables. It represents the current owner of each token in the dataset.
Columns:
1. nft_address: The address of the NFT collection containing the token whose ownership we are denoting.
2. token_id: The ID of the token (inside the collection) whose ownership we are denoting.
3. owner: The address that owned the token at the time of construction of this dataset.
""",
CURRENT_MARKET_VALUES: f"""This table is derived from the {NFTS}, {MINTS}, and {TRANSFERS} tables. It represents the current market value (in WEI) of each token in the dataset.
Columns:
1. nft_address: The address of the NFT collection containing the token whose market value we are denoting.
2. token_id: The ID of the token (inside the collection) whose market value we are denoting.
3. market_value: The estimated market value of the token at the time of construction of this dataset.
For this dataset, we estimate the market value as the last non-zero transaction value for a transfer involving this token.
This estimate may be inaccurate for some transfers (e.g. multiple token transfers made by an escrow contract in a single transaction)
but ought to be reasonably accurate for a large majority of tokens.
""",
TRANSFER_STATISTICS_BY_ADDRESS: f"""This table is derived from the {NFTS}, {MINTS}, and {TRANSFERS} tables. For each address that participated in
at least one NFT transfer between April 1, 2021 and September 25, 2021, this table shows exactly how many NFTs that address transferred to
other addresses and how many NFT transfers that address was the recipient of.
Columns:
1. address: An Ethereum address that participated in at least one NFT transfer between April 1, 2021 and September 25, 2021.
2. transfers_out: The number of NFTs that the given address transferred to any other address between April 1, 2021 and September 25, 2021.
3. transfers_in: The number of NFTs that any other address transferred to the given address between April 1, 2021 and September 25, 2021.
""",
}
AVAILABLE_MATRICES = {
OWNERSHIP_TRANSITIONS: f"""{OWNERSHIP_TRANSITIONS} is an adjacency matrix which counts the number of times that a token was transferred from a source address (indexed by the rows of the matrix) to a target address (indexed by the columns of the matrix).
These counts only include data about mints and transfers made between April 1, 2021 and September 25, 2021. We also denote the current owners of an NFT as having transitioned
the NFT from themselves back to themselves. This gives some estimate of an owner retaining the NFT in the given time period.
Load this matrix as follows:
>>> indexed_addresses, transitions = ds.load_ownership_transitions()
- "indexed_addresses" is a list denoting the address that each index (row/column) in the matrix represents.
- "transitions" is a numpy ndarray containing the matrix, with source addresses on the row axis and target addresses on the column axis.
"""
}
def explain() -> None:
"""
Explains the structure of the dataset.
"""
preamble = """
The Moonstream NFTs dataset
===========================
To load the NFTs dataset from a SQLite file, run:
>>> ds = nfts.dataset.FromSQLite(<path to sqlite database>)
This dataset consists of the following dataframes:"""
print(preamble)
for name, explanation in AVAILABLE_DATAFRAMES.items():
print(f"\nDataframe: {name}")
print(
f'Load using:\n>>> {name}_df = ds.load_dataframe(<sqlite connection or path to sqlite db>, "{name}")'
)
print("")
print(explanation)
print("- - -")
for name, explanation in AVAILABLE_MATRICES.items():
print(f"\nMatrix: {name}")
print("")
print(explanation)
print("- - -")
class FromSQLite:
def __init__(self, datafile: str) -> None:
"""
Initialize an NFTs dataset instance by connecting it to a SQLite database containing the data.
"""
self.conn = sqlite3.connect(datafile)
self.ownership_transitions: Optional[
Tuple[List[str], scipy.sparse.spmatrix]
] = None
self.ownership_transition_probabilities: Optional[
Tuple[List[str], scipy.sparse.spmatrix]
] = None
def load_dataframe(self, name: str) -> pd.DataFrame:
"""
Loads one of the available dataframes. To learn more about the available dataframes, run:
>>> nfts.dataset.explain()
"""
if name not in AVAILABLE_DATAFRAMES:
raise ValueError(
f"Invalid dataframe: {name}. Please choose from one of the available dataframes: {','.join(AVAILABLE_DATAFRAMES)}."
)
df = pd.read_sql_query(f"SELECT * FROM {name};", self.conn)
return df
def load_ownership_transitions(
self, force: bool = False
) -> Tuple[List[str], scipy.sparse.spmatrix]:
"""
Loads ownership transitions adjacency matrix from SQLite database.
To learn more about this matrix, run:
>>> nfts.dataset.explain()
"""
if self.ownership_transitions is not None and not force:
return self.ownership_transitions
cur = self.conn.cursor()
address_indexes_query = """
WITH all_addresses AS (
SELECT from_address AS address FROM ownership_transitions
UNION
SELECT to_address AS address FROM ownership_transitions
)
SELECT DISTINCT(all_addresses.address) AS address FROM all_addresses ORDER BY address ASC;
"""
addresses = [row[0] for row in cur.execute(address_indexes_query)]
num_addresses = len(addresses)
address_indexes = {address: i for i, address in enumerate(addresses)}
adjacency_matrix = scipy.sparse.dok_matrix((num_addresses, num_addresses))
adjacency_query = "SELECT from_address, to_address, num_transitions FROM ownership_transitions;"
rows = cur.execute(adjacency_query)
for from_address, to_address, num_transitions in tqdm(
rows, desc="Ownership transitions (adjacency matrix)"
):
from_index = address_indexes[from_address]
to_index = address_indexes[to_address]
adjacency_matrix[from_index, to_index] = num_transitions
self.ownership_transitions = (addresses, adjacency_matrix)
return self.ownership_transitions
def load_ownership_transition_probabilities(
self,
force: bool = False,
) -> Tuple[List[str], scipy.sparse.spmatrix]:
"""
Returns transition probabilities of ownership transitions, with each entry A_{i,j} denoting the
probability that the address represented by row i transferred an NFT to the address represented by column j.
"""
if self.ownership_transition_probabilities is not None and not force:
return self.ownership_transition_probabilities
addresses, adjacency_matrix = self.load_ownership_transitions(force)
# Sum of the entries in each row:
# https://docs.scipy.org/doc/scipy/reference/generated/scipy.sparse.spmatrix.sum.html#scipy.sparse.spmatrix.sum
row_sums = adjacency_matrix.sum(axis=1)
# Convert adjacency matrix to matrix of transition probabilities.
# We cannot do this by simply dividing transition_probabilities /= row_sums because that tries
# to coerce the matrix into a dense numpy ndarray and requires terabytes of memory.
transition_probabilities = adjacency_matrix.copy()
for i, j in zip(*transition_probabilities.nonzero()):
transition_probabilities[i, j] = (
transition_probabilities[i, j] / row_sums[i]
)
# Now we identify and remove burn addresses from this data.
self.ownership_transition_probabilities = (addresses, transition_probabilities)
return self.ownership_transition_probabilities
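A short usage sketch under the same assumptions (the database path and import path are hypothetical); each row of the probability matrix sums to at most 1, matching the row normalization performed above:

from nfts.dataset import FromSQLite  # hypothetical import path

ds = FromSQLite("nfts.sqlite")  # illustrative path to the dataset file
addresses, probabilities = ds.load_ownership_transition_probabilities()
# probabilities[i, j] estimates the chance that addresses[i] transferred
# an NFT to addresses[j] within the crawled window.
row_sums = probabilities.sum(axis=1)  # same reduction as in the loader
print(addresses[0], float(row_sums[0]))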

View file

@@ -0,0 +1,464 @@
"""
This module provides tools to interact with and maintain a SQLite database which acts/should act as
a datastore for a Moonstream NFTs dataset.
"""
import logging
import sqlite3
from typing import Any, cast, List, Tuple, Optional
from tqdm import tqdm
from .data import EventType, NFTEvent, NFTMetadata
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
event_tables = {EventType.TRANSFER: "transfers", EventType.MINT: "mints"}
CREATE_NFTS_TABLE_QUERY = """CREATE TABLE IF NOT EXISTS nfts
(
address TEXT NOT NULL UNIQUE ON CONFLICT FAIL,
name TEXT,
symbol TEXT,
UNIQUE(address, name, symbol)
);
"""
BACKUP_NFTS_TABLE_QUERY = "ALTER TABLE nfts RENAME TO nfts_backup;"
DROP_BACKUP_NFTS_TABLE_QUERY = "DROP TABLE IF EXISTS nfts_backup;"
SELECT_NFTS_QUERY = "SELECT address, name, symbol FROM nfts;"
CREATE_CHECKPOINT_TABLE_QUERY = """CREATE TABLE IF NOT EXISTS checkpoint
(
event_type STRING,
offset INTEGER
);
"""
def create_events_table_query(event_type: EventType) -> str:
creation_query = f"""
CREATE TABLE IF NOT EXISTS {event_tables[event_type]}
(
event_id TEXT NOT NULL UNIQUE ON CONFLICT FAIL,
transaction_hash TEXT,
block_number INTEGER,
nft_address TEXT REFERENCES nfts(address),
token_id TEXT,
from_address TEXT,
to_address TEXT,
transaction_value INTEGER,
timestamp INTEGER
);
"""
return creation_query
def backup_events_table_query(event_type: EventType) -> str:
backup_query = f"ALTER TABLE {event_tables[event_type]} RENAME TO {event_tables[event_type]}_backup;"
return backup_query
def drop_backup_events_table_query(event_type: EventType) -> str:
drop_query = f"DROP TABLE IF EXISTS {event_tables[event_type]}_backup;"
return drop_query
def select_events_table_query(event_type: EventType) -> str:
selection_query = f"""
SELECT
event_id,
transaction_hash,
nft_address,
token_id,
from_address,
to_address,
transaction_value,
block_number,
timestamp
FROM {event_tables[event_type]};
"""
return selection_query
def get_events_for_enrich(
conn: sqlite3.Connection, event_type: EventType
) -> List[NFTEvent]:
def select_query(event_type: EventType) -> str:
selection_query = f"""
SELECT
event_id,
transaction_hash,
block_number,
nft_address,
token_id,
from_address,
to_address,
transaction_value,
timestamp
FROM {event_tables[event_type]} WHERE block_number = 'None';
"""
return selection_query
logger.info(f"Loading {event_tables[event_type]} table events for enrich")
cur = conn.cursor()
cur.execute(select_query(event_type))
events: List[NFTEvent] = []
for row in cur:
(
event_id,
transaction_hash,
block_number,
nft_address,
token_id,
from_address,
to_address,
value,
timestamp,
) = cast(
Tuple[
str,
str,
Optional[int],
str,
str,
str,
str,
Optional[int],
Optional[int],
],
row,
)
event = NFTEvent(
event_id=event_id,
event_type=event_type, # Original argument to this function
nft_address=nft_address,
token_id=token_id,
from_address=from_address,
to_address=to_address,
transaction_hash=transaction_hash,
value=value,
block_number=block_number,
timestamp=timestamp,
)
events.append(event)
logger.info(f"Found {len(events)} events to enrich")
return events
def update_events_batch(conn: sqlite3.Connection, events: List[NFTEvent]) -> None:
def replace_query(event_type: EventType) -> str:
query = f"""
REPLACE INTO {event_tables[event_type]}(
event_id,
transaction_hash,
block_number,
nft_address,
token_id,
from_address,
to_address,
transaction_value,
timestamp
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
"""
return query
logger.info("Updating events in sqlite")
cur = conn.cursor()
try:
transfers = [
nft_event_to_tuple(event)
for event in events
if event.event_type == EventType.TRANSFER
]
mints = [
nft_event_to_tuple(event)
for event in events
if event.event_type == EventType.MINT
]
cur.executemany(replace_query(EventType.TRANSFER), transfers)
cur.executemany(replace_query(EventType.MINT), mints)
conn.commit()
except Exception as e:
logger.error(f"FAILED TO replace!!! :{events}")
conn.rollback()
raise e
def setup_database(conn: sqlite3.Connection) -> None:
"""
Sets up the schema of the Moonstream NFTs dataset in the given SQLite database.
"""
cur = conn.cursor()
cur.execute(CREATE_NFTS_TABLE_QUERY)
cur.execute(create_events_table_query(EventType.TRANSFER))
cur.execute(create_events_table_query(EventType.MINT))
cur.execute(CREATE_CHECKPOINT_TABLE_QUERY)
conn.commit()
def insert_events_query(event_type: EventType) -> str:
"""
Generates a query which inserts NFT events into the appropriate events table.
"""
query = f"""
INSERT OR IGNORE INTO {event_tables[event_type]}(
event_id,
transaction_hash,
block_number,
nft_address,
token_id,
from_address,
to_address,
transaction_value,
timestamp
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
"""
return query
def nft_event_to_tuple(
event: NFTEvent,
) -> Tuple[str, str, str, str, str, str, str, str, str]:
"""
Converts an NFT event into a tuple for use with sqlite cursor executemany. This includes
dropping e.g. the event_type field.
"""
return (
str(event.event_id),
str(event.transaction_hash),
str(event.block_number),
str(event.nft_address),
str(event.token_id),
str(event.from_address),
str(event.to_address),
str(event.value),
str(event.timestamp),
)
def get_checkpoint_offset(
conn: sqlite3.Connection, event_type: EventType
) -> Optional[int]:
cur = conn.cursor()
response = cur.execute(
f"SELECT * from checkpoint where event_type='{event_type.value}' order by rowid desc limit 1"
)
for row in response:
return row[1]
return None
def delete_checkpoints(
conn: sqlite3.Connection, event_type: EventType, commit: bool = True
) -> None:
cur = conn.cursor()
cur.execute(f"DELETE FROM checkpoint where event_type='{event_type.value}';")
if commit:
try:
conn.commit()
except:
conn.rollback()
raise
def insert_checkpoint(conn: sqlite3.Connection, event_type: EventType, offset: int):
query = f"""
INSERT INTO checkpoint (
event_type,
offset
) VALUES (?, ?)
"""
cur = conn.cursor()
cur.execute(query, [event_type.value, offset])
conn.commit()
def insert_address_metadata(
conn: sqlite3.Connection, metadata_list: List[NFTMetadata]
) -> None:
cur = conn.cursor()
query = f"""
INSERT INTO nfts (
address,
name,
symbol
) VALUES (?, ?, ?)
"""
try:
nfts = [
(metadata.address, metadata.name, metadata.symbol)
for metadata in metadata_list
]
cur.executemany(query, nfts)
conn.commit()
except Exception as e:
logger.error(f"Failed to save :\n {metadata_list}")
conn.rollback()
raise e
def insert_events(conn: sqlite3.Connection, events: List[NFTEvent]) -> None:
"""
Inserts the given events into the appropriate events table in the given SQLite database.
This method works with batches of events.
"""
cur = conn.cursor()
try:
transfers = [
nft_event_to_tuple(event)
for event in events
if event.event_type == EventType.TRANSFER
]
mints = [
nft_event_to_tuple(event)
for event in events
if event.event_type == EventType.MINT
]
cur.executemany(insert_events_query(EventType.TRANSFER), transfers)
cur.executemany(insert_events_query(EventType.MINT), mints)
conn.commit()
except Exception as e:
logger.error(f"FAILED TO SAVE :{events}")
conn.rollback()
raise e
def import_data(
target_conn: sqlite3.Connection,
source_conn: sqlite3.Connection,
event_type: EventType,
batch_size: int = 1000,
) -> None:
"""
    Imports the data corresponding to the given event type from the source database into the target
database.
Any existing data of that type in the target database is first deleted. It is a good idea to
create a backup copy of your target database before performing this operation.
"""
target_cur = target_conn.cursor()
drop_backup_query = DROP_BACKUP_NFTS_TABLE_QUERY
backup_table_query = BACKUP_NFTS_TABLE_QUERY
create_table_query = CREATE_NFTS_TABLE_QUERY
source_selection_query = SELECT_NFTS_QUERY
if event_type != EventType.ERC721:
drop_backup_query = drop_backup_events_table_query(event_type)
backup_table_query = backup_events_table_query(event_type)
create_table_query = create_events_table_query(event_type)
source_selection_query = select_events_table_query(event_type)
target_cur.execute(drop_backup_query)
target_cur.execute(backup_table_query)
target_cur.execute(create_table_query)
target_conn.commit()
source_cur = source_conn.cursor()
source_cur.execute(source_selection_query)
batch: List[Any] = []
for row in tqdm(source_cur, desc="Rows processed"):
if event_type == EventType.ERC721:
batch.append(NFTMetadata(*cast(Tuple[str, str, str], row)))
else:
# Order matches select query returned by select_events_table_query
(
event_id,
transaction_hash,
nft_address,
token_id,
from_address,
to_address,
value,
block_number,
timestamp,
) = cast(
Tuple[
str,
str,
str,
str,
str,
str,
Optional[int],
Optional[int],
Optional[int],
],
row,
)
event = NFTEvent(
event_id=event_id,
event_type=event_type, # Original argument to this function
nft_address=nft_address,
token_id=token_id,
from_address=from_address,
to_address=to_address,
transaction_hash=transaction_hash,
value=value,
block_number=block_number,
timestamp=timestamp,
)
batch.append(event)
        if len(batch) == batch_size:
            if event_type == EventType.ERC721:
                insert_address_metadata(target_conn, cast(List[NFTMetadata], batch))
            else:
                insert_events(target_conn, cast(List[NFTEvent], batch))
            # Reset the batch after flushing it; otherwise the final flush below
            # would re-insert every row (and the plain INSERTs into nfts would duplicate).
            batch = []
    if batch:
        if event_type == EventType.ERC721:
            insert_address_metadata(target_conn, cast(List[NFTMetadata], batch))
        else:
            insert_events(target_conn, cast(List[NFTEvent], batch))
target_cur.execute(CREATE_CHECKPOINT_TABLE_QUERY)
target_conn.commit()
source_offset = get_checkpoint_offset(source_conn, event_type)
if source_offset is not None:
delete_checkpoints(target_conn, event_type, commit=False)
insert_checkpoint(target_conn, event_type, source_offset)
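# Usage sketch (both paths are illustrative assumptions). Copies transfer data
# between datasets, backing up the target's existing transfers table first:
#
#   source_conn = sqlite3.connect("nfts-source.sqlite")
#   target_conn = sqlite3.connect("nfts-target.sqlite")
#   import_data(target_conn, source_conn, EventType.TRANSFER, batch_size=1000)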
def filter_data(
sqlite_db: sqlite3.Connection,
start_time: Optional[int] = None,
end_time: Optional[int] = None,
):
"""
Run Deletes query depends on filters
"""
cur = sqlite_db.cursor()
print(f"Remove by timestamp < {start_time}")
if start_time:
cur.execute(f"DELETE from transfers where timestamp < {start_time}")
print(f"Transfers filtered out: {cur.rowcount}")
sqlite_db.commit()
cur.execute(f"DELETE from mints where timestamp < {start_time}")
print(f"Mints filtered out: {cur.rowcount}")
sqlite_db.commit()
print(f"Remove by timestamp > {end_time}")
if end_time:
cur.execute(f"DELETE from transfers where timestamp > {end_time}")
print(f"Transfers filtered out: {cur.rowcount}")
sqlite_db.commit()
cur.execute(f"DELETE from mints where timestamp > {end_time}")
print(f"Mints filtered out: {cur.rowcount}")
sqlite_db.commit()
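# Sketch: keep only events from calendar year 2021 (the UNIX timestamps shown
# are illustrative):
#
#   filter_data(conn, start_time=1609459200, end_time=1640995199)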


@ -0,0 +1,494 @@
"""
Tools to build derived relations from raw data (nfts, transfers, mints relations).
For example:
- Current owner of each token
- Current value of each token
"""
import logging
import sqlite3
logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)
class LastValue:
"""
Stores the last seen value in a given column. This is meant to be used as an aggregate function.
We use it, for example, to get the current owner of an NFT (inside a given window of time).
"""
def __init__(self):
self.value = None
def step(self, value):
self.value = value
def finalize(self):
return self.value
class LastNonzeroValue:
"""
Stores the last non-zero value in a given column. This is meant to be used as an aggregate
function. We use it, for example, to get the current market value of an NFT (inside a given
window of time).
"""
def __init__(self):
self.value = 0
def step(self, value):
if value != 0:
self.value = value
def finalize(self):
return self.value
class QuantileFunction:
"""Split vlues to quantiles"""
def __init__(self, num_quantiles) -> None:
self.divider = 1 / num_quantiles
def __call__(self, value):
if value is None or value == "None":
value = 0
quantile = self.divider
try:
while value > quantile:
quantile += self.divider
if quantile > 1:
quantile = 1
return quantile
except Exception as err:
print(err)
raise
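# A worked example of the bucketing arithmetic above (illustrative, not part of
# the original module): QuantileFunction(10) steps quantile through
# 0.1, 0.2, 0.3, ... until value <= quantile, so a relative value of 0.37
# lands in the 0.4 bucket (up to floating-point accumulation error):
#
#   q = QuantileFunction(10)
#   assert abs(q(0.37) - 0.4) < 1e-9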
def ensure_custom_aggregate_functions(conn: sqlite3.Connection) -> None:
"""
Loads custom aggregate functions to an active SQLite3 connection.
"""
conn.create_aggregate("last_value", 1, LastValue)
conn.create_aggregate("last_nonzero_value", 1, LastNonzeroValue)
conn.create_function("quantile_10", 1, QuantileFunction(10))
conn.create_function("quantile_25", 1, QuantileFunction(25))
def current_owners(conn: sqlite3.Connection) -> None:
"""
Requires a connection to a dataset in which the raw data (esp. transfers) has already been
loaded.
"""
ensure_custom_aggregate_functions(conn)
drop_existing_current_owners_query = "DROP TABLE IF EXISTS current_owners;"
current_owners_query = """
CREATE TABLE current_owners AS
SELECT nft_address, token_id, last_value(to_address) AS owner FROM
(
SELECT * FROM mints
UNION ALL
SELECT * FROM transfers
)
GROUP BY nft_address, token_id;"""
cur = conn.cursor()
try:
cur.execute(drop_existing_current_owners_query)
cur.execute(current_owners_query)
conn.commit()
except Exception as e:
conn.rollback()
logger.error("Could not create derived dataset: current_owners")
logger.error(e)
def current_market_values(conn: sqlite3.Connection) -> None:
"""
Requires a connection to a dataset in which the raw data (esp. transfers) has already been
loaded.
"""
ensure_custom_aggregate_functions(conn)
drop_existing_current_market_values_query = (
"DROP TABLE IF EXISTS current_market_values;"
)
current_market_values_query = """
CREATE TABLE current_market_values AS
SELECT nft_address, token_id, last_nonzero_value(transaction_value) AS market_value FROM
(
SELECT * FROM mints
UNION ALL
SELECT * FROM transfers
)
GROUP BY nft_address, token_id;"""
cur = conn.cursor()
try:
cur.execute(drop_existing_current_market_values_query)
cur.execute(current_market_values_query)
conn.commit()
except Exception as e:
conn.rollback()
logger.error("Could not create derived dataset: current_market_values")
def current_values_distribution(conn: sqlite3.Connection) -> None:
"""
Requires a connection to a dataset in which current_market_values has already been loaded.
"""
ensure_custom_aggregate_functions(conn)
drop_existing_values_distribution_query = (
"DROP TABLE IF EXISTS market_values_distribution;"
)
current_values_distribution_query = """
CREATE TABLE market_values_distribution AS
select
current_market_values.nft_address as address,
current_market_values.token_id as token_id,
CAST(current_market_values.market_value as REAL) / max_values.max_value as relative_value
from
current_market_values
inner join (
select
nft_address,
max(market_value) as max_value
from
current_market_values
group by
nft_address
) as max_values on current_market_values.nft_address = max_values.nft_address;
"""
cur = conn.cursor()
try:
cur.execute(drop_existing_values_distribution_query)
cur.execute(current_values_distribution_query)
conn.commit()
except Exception as e:
conn.rollback()
logger.error("Could not create derived dataset: current_values_distribution")
logger.error(e)
def transfer_statistics_by_address(conn: sqlite3.Connection) -> None:
"""
Create transfer in and transfer out for each address.
"""
drop_existing_transfer_statistics_by_address_query = (
"DROP TABLE IF EXISTS transfer_statistics_by_address;"
)
transfer_statistics_by_address_query = """
CREATE TABLE transfer_statistics_by_address AS
SELECT
address,
sum(transfer_out) as transfers_out,
sum(transfer_in) as transfers_in
from
(
SELECT
from_address as address,
1 as transfer_out,
0 as transfer_in
from
transfers
UNION
ALL
select
to_address as address,
0 as transfer_out,
1 as transfer_in
from
transfers
)
group by
address;
"""
cur = conn.cursor()
try:
cur.execute(drop_existing_transfer_statistics_by_address_query)
cur.execute(transfer_statistics_by_address_query)
conn.commit()
except Exception as e:
conn.rollback()
logger.error("Could not create derived dataset: transfer_statistics_by_address")
logger.error(e)
def quantile_generating(conn: sqlite3.Connection):
"""
    Creates quantile distribution tables; the bucket counts (10 and 25) come
    from the QuantileFunction instances registered on the connection.
"""
ensure_custom_aggregate_functions(conn)
drop_calculate_10_quantiles = (
"DROP TABLE IF EXISTS transfer_values_quantile_10_distribution_per_address;"
)
calculate_10_quantiles = """
CREATE TABLE transfer_values_quantile_10_distribution_per_address AS
select
cumulate.address as address,
CAST(quantile_10(cumulate.relative_value) as TEXT) as quantiles,
cumulate.relative_value as relative_value
from
(
select
current_market_values.nft_address as address,
COALESCE(
CAST(current_market_values.market_value as REAL) / max_values.max_value,
0
) as relative_value
from
current_market_values
inner join (
select
current_market_values.nft_address,
max(market_value) as max_value
from
current_market_values
group by
current_market_values.nft_address
) as max_values on current_market_values.nft_address = max_values.nft_address
) as cumulate
"""
drop_calculate_25_quantiles = (
"DROP TABLE IF EXISTS transfer_values_quantile_25_distribution_per_address;"
)
calculate_25_quantiles = """
CREATE TABLE transfer_values_quantile_25_distribution_per_address AS
select
cumulate.address as address,
CAST(quantile_25(cumulate.relative_value) as TEXT) as quantiles,
cumulate.relative_value as relative_value
from
(
select
current_market_values.nft_address as address,
COALESCE(
CAST(current_market_values.market_value as REAL) / max_values.max_value,
0
) as relative_value
from
current_market_values
inner join (
select
current_market_values.nft_address,
max(market_value) as max_value
from
current_market_values
group by
current_market_values.nft_address
) as max_values on current_market_values.nft_address = max_values.nft_address
) as cumulate
"""
cur = conn.cursor()
try:
print("Creating transfer_values_quantile_10_distribution_per_address")
cur.execute(drop_calculate_10_quantiles)
cur.execute(calculate_10_quantiles)
print("Creating transfer_values_quantile_25_distribution_per_address")
cur.execute(drop_calculate_25_quantiles)
cur.execute(calculate_25_quantiles)
conn.commit()
except Exception as e:
conn.rollback()
logger.error("Could not create derived dataset: quantile_generating")
logger.error(e)
def transfers_mints_connection_table(conn: sqlite3.Connection):
"""
    Creates a table connecting transfers with the mints that produced them.
"""
drop_transfers_mints_connection = "DROP TABLE IF EXISTS transfers_mints;"
transfers_mints_connection = """
CREATE TABLE transfers_mints as
select
transfers.event_id as transfer_id,
mints.mint_id as mint_id
from
transfers
inner join (
select
            Max(possible_mints.mints_time) as mint_time,
            possible_mints.transfer_id as transfer_id
from
(
select
mint_id,
mints.timestamp as mints_time,
transfers.token_id,
transfers.timestamp,
transfers.event_id as transfer_id
from
transfers
inner join (
select
mints.event_id as mint_id,
mints.nft_address,
mints.token_id,
mints.timestamp
from
mints
group by
mints.nft_address,
mints.token_id,
mints.timestamp
) as mints on transfers.nft_address = mints.nft_address
and transfers.token_id = mints.token_id
and mints.timestamp <= transfers.timestamp
        ) as possible_mints
group by
            possible_mints.transfer_id
) as mint_time on mint_time.transfer_id = transfers.event_id
inner join (
select
mints.event_id as mint_id,
mints.nft_address,
mints.token_id,
mints.timestamp
from
mints
) as mints on transfers.nft_address = mints.nft_address
and transfers.token_id = mints.token_id
and mints.timestamp = mint_time.mint_time;
"""
cur = conn.cursor()
try:
cur.execute(drop_transfers_mints_connection)
cur.execute(transfers_mints_connection)
conn.commit()
except Exception as e:
conn.rollback()
logger.error(
"Could not create derived dataset: transfers_mints_connection_table"
)
logger.error(e)
def mint_holding_times(conn: sqlite3.Connection):
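    """
    Creates the distribution of the number of days tokens were held between mint
    and first transfer. Requires the transfers_mints table.
    """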
drop_mints_holding_table = "DROP TABLE IF EXISTS mint_holding_times;"
mints_holding_table = """
CREATE TABLE mint_holding_times AS
SELECT
days_after_minted.days as days,
count(*) as num_holds
from
(
SELECT
mints.nft_address,
mints.token_id,
(
                    first_transfers.first_transfer - mints.timestamp
) / 86400 as days
from
mints
inner join (
select
transfers_mints.mint_id,
transfers.nft_address,
transfers.token_id,
                min(transfers.timestamp) as first_transfer
from
transfers
inner join transfers_mints on transfers_mints.transfer_id = transfers.event_id
group by
transfers.nft_address,
transfers.token_id,
transfers_mints.mint_id
            ) as first_transfers on first_transfers.mint_id = mints.event_id
) as days_after_minted
group by days;
"""
cur = conn.cursor()
try:
cur.execute(drop_mints_holding_table)
cur.execute(mints_holding_table)
conn.commit()
except Exception as e:
conn.rollback()
logger.error("Could not create derived dataset: mint_holding_times")
logger.error(e)
def transfer_holding_times(conn: sqlite3.Connection):
"""
    Creates the distribution of holding times (in days) between consecutive transfers.
"""
drop_transfer_holding_times = "DROP TABLE IF EXISTS transfer_holding_times;"
transfer_holding_times = """
CREATE TABLE transfer_holding_times AS
    select days_between.days as days, count(*) as num_holds
from (SELECT
middle.address,
middle.token_id,
(middle.LEAD - middle.timestamp) / 86400 as days
from
(
SELECT
nft_address AS address,
token_id as token_id,
timestamp as timestamp,
LEAD(timestamp, 1, Null) OVER (
PARTITION BY nft_address,
token_id
ORDER BY
timestamp
) as LEAD
FROM
transfers
) as middle
where
LEAD is not Null
    ) as days_between
group by days;
"""
cur = conn.cursor()
try:
cur.execute(drop_transfer_holding_times)
cur.execute(transfer_holding_times)
conn.commit()
except Exception as e:
conn.rollback()
logger.error("Could not create derived dataset: transfer_holding_times")
logger.error(e)
def ownership_transitions(conn: sqlite3.Connection) -> None:
"""
Derives a table called ownership_transitions which counts the number of transitions in ownership
from address A to address B for each pair of addresses (A, B) for which there was at least
one transfer from A to B.
Requires the following tables:
- transfers
- current_owners
"""
table_name = "ownership_transitions"
drop_ownership_transitions = f"DROP TABLE IF EXISTS {table_name};"
# TODO(zomglings): Adding transaction_value below causes integer overflow. Might be worth trying MEAN instead of SUM for value transferred.
create_ownership_transitions = f"""
CREATE TABLE {table_name} AS
WITH transitions(from_address, to_address, transition) AS (
SELECT current_owners.owner as from_address, current_owners.owner as to_address, 1 as transition FROM current_owners
UNION ALL
SELECT transfers.from_address as from_address, transfers.to_address as to_address, 1 as transition FROM transfers
)
SELECT
transitions.from_address,
transitions.to_address,
sum(transitions.transition) as num_transitions
FROM transitions GROUP BY transitions.from_address, transitions.to_address;
"""
cur = conn.cursor()
try:
cur.execute(drop_ownership_transitions)
cur.execute(create_ownership_transitions)
conn.commit()
except Exception as e:
conn.rollback()
logger.error(f"Could not create derived dataset: {table_name}")
logger.error(e)
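# A pipeline sketch (not part of the original file) showing a dependency-safe
# order for these derivations: the owners/market-values tables read the raw
# mints and transfers tables, the distribution and quantile tables read
# current_market_values, and mint_holding_times reads transfers_mints.
#
#   conn = sqlite3.connect("nfts.sqlite")  # illustrative path
#   current_owners(conn)
#   current_market_values(conn)
#   current_values_distribution(conn)
#   quantile_generating(conn)
#   transfer_statistics_by_address(conn)
#   transfers_mints_connection_table(conn)
#   mint_holding_times(conn)
#   transfer_holding_times(conn)
#   ownership_transitions(conn)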


@ -0,0 +1,161 @@
import logging
import sqlite3
from typing import Any, cast, Iterator, List, Optional, Set
import json
from tqdm import tqdm
import requests
from .data import BlockBounds, EventType, NFTEvent, event_types
from .datastore import (
get_checkpoint_offset,
get_events_for_enrich,
insert_address_metadata,
insert_checkpoint,
insert_events,
update_events_batch,
)
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class EthereumBatchloader:
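    """
    Batches JSON-RPC requests (blocks, transactions) to an Ethereum node over HTTP.
    """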
def __init__(self, jsonrpc_url) -> None:
self.jsonrpc_url = jsonrpc_url
self.message_number = 0
self.commands: List[Any] = []
        self.requests_batch: List[Any] = []
def load_blocks(self, block_list: List[int], with_transactions: bool):
"""
Request list of blocks
"""
rpc = [
{
"jsonrpc": "2.0",
"id": index,
"method": "eth_getBlockByNumber",
"params": params_single,
}
for index, params_single in enumerate(
[[hex(block_number), with_transactions] for block_number in block_list]
)
]
response = self.send_json_message(rpc)
return response
def load_transactions(self, transaction_hashes: List[str]):
"""
Request list of transactions
"""
rpc = [
{
"jsonrpc": "2.0",
"method": "eth_getTransactionByHash",
"id": index,
"params": [tx_hash],
}
for index, tx_hash in enumerate(transaction_hashes)
]
response = self.send_json_message(rpc)
return response
def send_message(self, payload):
headers = {"Content-Type": "application/json"}
try:
r = requests.post(
self.jsonrpc_url, headers=headers, data=payload, timeout=300
)
except Exception as e:
print(e)
raise e
return r
def send_json_message(self, message):
encoded_json = json.dumps(message)
raw_response = self.send_message(encoded_json.encode("utf8"))
response = raw_response.json()
return response
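# Usage sketch (the node URL is an illustrative assumption): several
# eth_getTransactionByHash calls are batched into a single JSON-RPC POST.
#
#   loader = EthereumBatchloader(jsonrpc_url="http://localhost:8545")
#   responses = loader.load_transactions(["0x<tx-hash-1>", "0x<tx-hash-2>"])
#   # responses is a list of JSON-RPC response objects, one per requested hash.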
def enrich_from_web3(
nft_events: List[NFTEvent],
batch_loader: EthereumBatchloader,
) -> List[NFTEvent]:
"""
    Adds block number, value, and timestamp from web3 when they are missing
    (i.e. the transaction is absent from the database).
"""
transactions_to_query = set()
indices_to_update: List[int] = []
for index, nft_event in enumerate(nft_events):
if (
nft_event.block_number == "None"
or nft_event.value == "None"
or nft_event.timestamp == "None"
):
transactions_to_query.add(nft_event.transaction_hash)
indices_to_update.append(index)
if len(transactions_to_query) == 0:
return nft_events
logger.info("Calling JSON RPC API")
jsonrpc_transactions_response = batch_loader.load_transactions(
list(transactions_to_query)
)
transactions_map = {
result["result"]["hash"]: (
int(result["result"]["value"], 16),
int(result["result"]["blockNumber"], 16),
)
for result in jsonrpc_transactions_response
}
blocks_to_query: Set[int] = set()
for index in indices_to_update:
nft_events[index].value, nft_events[index].block_number = transactions_map[
nft_events[index].transaction_hash
]
blocks_to_query.add(cast(int, nft_events[index].block_number))
if len(blocks_to_query) == 0:
return nft_events
jsonrpc_blocks_response = batch_loader.load_blocks(list(blocks_to_query), False)
blocks_map = {
int(result["result"]["number"], 16): int(result["result"]["timestamp"], 16)
for result in jsonrpc_blocks_response
}
for index in indices_to_update:
        nft_events[index].timestamp = blocks_map[
            cast(int, nft_events[index].block_number)
        ]
return nft_events
def enrich(
datastore_conn: sqlite3.Connection,
event_type: EventType,
batch_loader: EthereumBatchloader,
batch_size: int = 1000,
) -> None:
events = get_events_for_enrich(datastore_conn, event_type)
events_batch = []
for event in tqdm(events, f"Processing events for {event_type.value} event type"):
events_batch.append(event)
if len(events_batch) == batch_size:
logger.info("Getting data from JSONrpc")
enriched_events = enrich_from_web3(
events_batch,
batch_loader,
)
update_events_batch(datastore_conn, enriched_events)
events_batch = []
logger.info("Getting data from JSONrpc")
enriched_events = enrich_from_web3(
events_batch,
batch_loader,
)
update_events_batch(datastore_conn, enriched_events)
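# End-to-end sketch (path and URL are illustrative assumptions): fill in missing
# block numbers, values, and timestamps for all transfer events in a dataset.
#
#   conn = sqlite3.connect("nfts.sqlite")
#   loader = EthereumBatchloader(jsonrpc_url="http://localhost:8545")
#   enrich(conn, EventType.TRANSFER, loader, batch_size=1000)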


@ -0,0 +1,186 @@
import logging
import sqlite3
from typing import Any, cast, Iterator, List, Optional, Set
import json
from moonstreamdb.models import (
EthereumAddress,
EthereumLabel,
EthereumTransaction,
EthereumBlock,
)
from sqlalchemy import or_, and_
from sqlalchemy.orm import Session
from tqdm import tqdm
from web3 import Web3
import requests
from .data import BlockBounds, EventType, NFTEvent, NFTMetadata, event_types
from .datastore import (
get_checkpoint_offset,
insert_address_metadata,
insert_checkpoint,
insert_events,
)
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def add_events(
datastore_conn: sqlite3.Connection,
db_session: Session,
event_type: EventType,
initial_offset=0,
bounds: Optional[BlockBounds] = None,
batch_size: int = 10,
) -> None:
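    """
    Loads NFT events of the given type from the Moonstream database into the
    SQLite datastore, batching over distinct created_at timestamps and
    checkpointing progress after each batch.
    """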
raw_created_at_list = (
db_session.query(EthereumLabel.created_at)
.filter(EthereumLabel.label == event_type.value)
.order_by(EthereumLabel.created_at.asc())
.distinct(EthereumLabel.created_at)
).all()
created_at_list = [
created_at[0] for created_at in raw_created_at_list[initial_offset:]
]
query = (
db_session.query(
EthereumLabel.id,
EthereumLabel.label,
EthereumAddress.address,
EthereumLabel.label_data,
EthereumLabel.transaction_hash,
EthereumTransaction.value,
EthereumTransaction.block_number,
EthereumBlock.timestamp,
)
.filter(EthereumLabel.label == event_type.value)
.join(EthereumAddress, EthereumLabel.address_id == EthereumAddress.id)
.outerjoin(
EthereumTransaction,
EthereumLabel.transaction_hash == EthereumTransaction.hash,
)
.outerjoin(
EthereumBlock,
EthereumTransaction.block_number == EthereumBlock.block_number,
)
.order_by(EthereumLabel.created_at.asc(),)
)
if bounds is not None:
time_filters = [EthereumTransaction.block_number >= bounds.starting_block]
if bounds.ending_block is not None:
time_filters.append(EthereumTransaction.block_number <= bounds.ending_block)
bounds_filters = [EthereumTransaction.hash == None, and_(*time_filters)]
query = query.filter(or_(*bounds_filters))
pbar = tqdm(total=(len(raw_created_at_list)))
pbar.set_description(f"Processing created ats")
pbar.update(initial_offset)
batch_start = 0
batch_end = batch_start + batch_size
while batch_start <= len(created_at_list):
events = query.filter(
EthereumLabel.created_at.in_(created_at_list[batch_start : batch_end + 1])
).all()
        if not events:
            # Advance the batch window even when it yields no events; otherwise
            # this loop would spin forever on the same empty slice.
            batch_start = batch_end + 1
            batch_end = min(batch_end + batch_size, len(created_at_list))
            continue
raw_events_batch = []
for (
event_id,
label,
address,
label_data,
transaction_hash,
value,
block_number,
timestamp,
) in events:
raw_event = NFTEvent(
event_id=event_id,
event_type=event_types[label],
nft_address=address,
token_id=label_data["tokenId"],
from_address=label_data["from"],
to_address=label_data["to"],
transaction_hash=transaction_hash,
value=value,
block_number=block_number,
timestamp=timestamp,
)
raw_events_batch.append(raw_event)
logger.info(f"Adding {len(raw_events_batch)} to database")
insert_events(
datastore_conn, raw_events_batch
        )  # TODO: re-enable web3 enrichment; it was removed because the node is down
insert_checkpoint(datastore_conn, event_type, batch_end + initial_offset)
pbar.update(batch_end - batch_start + 1)
batch_start = batch_end + 1
batch_end = min(batch_end + batch_size, len(created_at_list))
def create_dataset(
datastore_conn: sqlite3.Connection,
db_session: Session,
event_type: EventType,
bounds: Optional[BlockBounds] = None,
batch_size: int = 10,
) -> None:
"""
Creates Moonstream NFTs dataset in the given SQLite datastore.
"""
offset = get_checkpoint_offset(datastore_conn, event_type)
if offset is not None:
logger.info(f"Found checkpoint for {event_type.value}: offset = {offset}")
else:
offset = 0
logger.info(f"Did not found any checkpoint for {event_type.value}")
if event_type == EventType.ERC721:
add_contracts_metadata(datastore_conn, db_session, offset, batch_size)
else:
add_events(
datastore_conn, db_session, event_type, offset, bounds, batch_size,
)
def add_contracts_metadata(
datastore_conn: sqlite3.Connection,
db_session: Session,
initial_offset: int = 0,
batch_size: int = 1000,
) -> None:
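    """
    Loads ERC721 contract metadata (name, symbol) from the Moonstream database
    into the nfts table, checkpointing progress.
    """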
logger.info("Adding erc721 contract metadata")
query = (
db_session.query(EthereumLabel.label_data, EthereumAddress.address)
.filter(EthereumLabel.label == EventType.ERC721.value)
.join(EthereumAddress, EthereumLabel.address_id == EthereumAddress.id)
.order_by(EthereumLabel.created_at, EthereumLabel.address_id)
)
offset = initial_offset
while True:
events = query.offset(offset).limit(batch_size).all()
if not events:
break
offset += len(events)
events_batch: List[NFTMetadata] = []
for label_data, address in events:
events_batch.append(
NFTMetadata(
address=address,
name=label_data.get("name", None),
symbol=label_data.get("symbol", None),
)
)
insert_address_metadata(datastore_conn, events_batch)
insert_checkpoint(datastore_conn, EventType.ERC721, offset)
logger.info(f"Already added {offset}")
logger.info(f"Added total of {offset-initial_offset} nfts metadata")


@ -0,0 +1 @@
img/

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long


@ -0,0 +1,2 @@
export MOONSTREAM_DB_URI="<connection string for Moonstream database>"
export MOONSTREAM_WEB3_PROVIDER="<URL for HTTP provider or filesystem path for IPC provider>"


@ -0,0 +1,52 @@
from setuptools import find_packages, setup
long_description = ""
with open("README.md") as ifp:
long_description = ifp.read()
setup(
name="nfts",
version="0.0.2",
author="Bugout.dev",
author_email="engineers@bugout.dev",
license="Apache License 2.0",
description="Tools to build, update, and interact with the Moonstream NFTs dataset",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/bugout-dev/moonstream",
platforms="all",
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Libraries :: Python Modules",
],
python_requires=">=3.6",
packages=find_packages(),
package_data={"nfts": ["py.typed"]},
zip_safe=False,
install_requires=[
"moonstreamdb",
"humbug",
"numpy",
"pandas",
"requests",
"scipy",
"tqdm",
"web3",
],
extras_require={
"dev": ["black", "mypy", "types-requests"],
"distribute": ["setuptools", "twine", "wheel"],
},
entry_points={
"console_scripts": [
"nfts=nfts.cli:main",
]
},
)


@ -7,7 +7,7 @@ User=ubuntu
Group=www-data
WorkingDirectory=/home/ubuntu/moonstream/db/server
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
ExecStart=/home/ubuntu/moonstream/db/server/moonstreamdb -host 0.0.0.0 -port "${MOONSTREAM_DB_SERVER_PORT}"
ExecStart=/home/ubuntu/moonstream/db/server/moonstreamdb -host 127.0.0.1 -port "${MOONSTREAM_DB_SERVER_PORT}"
SyslogIdentifier=moonstreamdb
[Install]


@ -33,7 +33,10 @@ setup(
package_data={"moonstreamdb": ["py.typed"]},
zip_safe=False,
install_requires=["alembic", "psycopg2-binary", "sqlalchemy"],
extras_require={"dev": ["black", "mypy"]},
extras_require={
"dev": ["black", "mypy"],
"distribute": ["setuptools", "twine", "wheel"],
},
entry_points={
"console_scripts": [
"moonstreamdb=moonstreamdb.cli:main",


@ -1,26 +1,19 @@
import React, {
useState,
useContext,
Suspense,
useEffect,
useLayoutEffect,
} from "react";
import React, { useState, Suspense, useEffect, useLayoutEffect } from "react";
import {
Fade,
Flex,
Heading,
Box,
Image as ChakraImage,
Button,
Center,
chakra,
Stack,
Link,
SimpleGrid,
useMediaQuery,
Grid,
Text,
GridItem,
SimpleGrid,
Image as ChakraImage,
} from "@chakra-ui/react";
import dynamic from "next/dynamic";
import useUser from "../src/core/hooks/useUser";
@ -30,56 +23,15 @@ import {
MIXPANEL_PROPS,
MIXPANEL_EVENTS,
} from "../src/core/providers/AnalyticsProvider/constants";
import UIContext from "../src/core/providers/UIProvider/context";
import { AWS_ASSETS_PATH } from "../src/core/constants";
import mixpanel from "mixpanel-browser";
const SplitWithImage = dynamic(
() => import("../src/components/SplitWithImage"),
{
ssr: false,
}
);
const ConnectedButtons = dynamic(
() => import("../src/components/ConnectedButtons"),
{
ssr: false,
}
);
const RiDashboardFill = dynamic(() =>
import("react-icons/ri").then((mod) => mod.RiDashboardFill)
);
const FaFileContract = dynamic(() =>
import("react-icons/fa").then((mod) => mod.FaFileContract)
);
const GiMeshBall = dynamic(() =>
import("react-icons/gi").then((mod) => mod.GiMeshBall)
);
const GiLogicGateXor = dynamic(() =>
import("react-icons/gi").then((mod) => mod.GiLogicGateXor)
);
const GiSuspicious = dynamic(() =>
import("react-icons/gi").then((mod) => mod.GiSuspicious)
);
const GiHook = dynamic(() =>
import("react-icons/gi").then((mod) => mod.GiHook)
);
const AiFillApi = dynamic(() =>
import("react-icons/ai").then((mod) => mod.AiFillApi)
);
const BiTransfer = dynamic(() =>
import("react-icons/bi").then((mod) => mod.BiTransfer)
);
const IoTelescopeSharp = dynamic(() =>
import("react-icons/io5").then((mod) => mod.IoTelescopeSharp)
);
const HEADING_PROPS = {
fontWeight: "700",
fontSize: ["4xl", "5xl", "4xl", "5xl", "6xl", "7xl"],
@ -94,12 +46,11 @@ const assets = {
pendingTransactions: `${AWS_ASSETS_PATH}/Ethereum+pending+transactions.png`,
priceInformation: `${AWS_ASSETS_PATH}/Price+information.png`,
socialMediaPosts: `${AWS_ASSETS_PATH}/Social+media+posts.png`,
algorithmicFunds: `${AWS_ASSETS_PATH}/algorithmic+funds.png`,
cryptoTraders: `${AWS_ASSETS_PATH}/crypto+traders.png`,
comicWhite: `${AWS_ASSETS_PATH}/moonstream-comic-white.png`,
smartDevelopers: `${AWS_ASSETS_PATH}/smart+contract+developers.png`,
};
const Homepage = () => {
const ui = useContext(UIContext);
const [background, setBackground] = useState("background720");
const [backgroundLoaded720, setBackgroundLoaded720] = useState(false);
const [backgroundLoaded1920, setBackgroundLoaded1920] = useState(false);
@ -254,7 +205,7 @@ const Homepage = () => {
fontWeight="semibold"
color="white"
>
All the crypto data you care about in a single stream
Open source blockchain analytics
</Heading>
<chakra.span
my={12}
@ -262,17 +213,9 @@ const Homepage = () => {
display="inline-block"
color="blue.200"
>
Get all the crypto data you need in a single stream.
From pending transactions in the Ethereum transaction
pool to Elon Musk's latest tweets.
</chakra.span>
<chakra.span
fontSize={["md", "2xl", "3xl", "3xl", "3xl", "4xl"]}
display="inline-block"
color="blue.300"
>
Access this data through the Moonstream dashboard or
API
Product analytics for Web3. Moonstream helps you
understand exactly how people are using your smart
contracts.
</chakra.span>
</Stack>
</Flex>
@ -280,16 +223,8 @@ const Homepage = () => {
</chakra.header>
</GridItem>
<GridItem
px="7%"
colSpan="12"
// pt={["20px", "20px", "100px", null, "120px"]}
pt={0}
pb={["20px", "56px", null, "184px"]}
minH="100vh"
>
<GridItem px="7%" colSpan="12" pt={0} minH="100vh">
<chakra.span
// {...HEADING_PROPS}
textAlign="center"
fontWeight="600"
fontSize="lg"
@ -298,13 +233,17 @@ const Homepage = () => {
>
<Text
mb={18}
// mb={[12, 12, 12, null, 48]}
fontSize={["md", "2xl", "3xl", "3xl", "3xl", "4xl"]}
>
{` We believe in financial inclusion. Proprietary technologies
are not financially inclusive. That's why all our software
is `}
<chakra.span display="inline-block" textColor="orange.900">
We believe that the blockchain is for everyone. This
requires complete <b>transparency</b>. That's why all our
software is{" "}
<chakra.span
display="inline-block"
textColor="orange.900"
as={Link}
href="https://github.com/bugout-dev/moonstream"
>
<i>open source</i>
</chakra.span>
</Text>
@ -313,10 +252,10 @@ const Homepage = () => {
<Heading
{...HEADING_PROPS}
textAlign="center"
mt={16}
pb={[12, 12, 12, null, 48]}
mt={48}
pb={[12, 12, 12, null, 24]}
>
Data you can add to your stream:
See how your smart contracts are being used from:
</Heading>
<SimpleGrid columns={[1, 2, 2, 4, null, 4]}>
<Stack spacing={1} px={1} alignItems="center">
@ -369,193 +308,58 @@ const Homepage = () => {
w="100%"
direction={["column", "row", "column", null, "column"]}
flexWrap={["nowrap", "nowrap", "nowrap", null, "nowrap"]}
pb="66px"
pb="32px"
>
<ConnectedButtons
title={"You are..."}
button1={{
label: "Crypto trader",
link: "/#cryptoTrader",
speedBase={0.3}
title={"You need a fusion of..."}
button4={{
label: "Blockchain analytics",
speed: 1,
// link: "/#analytics",
onClick: () => {
mixpanel.get_distinct_id() &&
mixpanel.track(`${MIXPANEL_EVENTS.BUTTON_CLICKED}`, {
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `scroll to CryptoTrader`,
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Connected buttons: scroll to analytics`,
});
},
}}
button1={{
label: "TX pool real time data",
speed: 9,
// link: "/#txpool",
onClick: () => {
mixpanel.get_distinct_id() &&
mixpanel.track(`${MIXPANEL_EVENTS.BUTTON_CLICKED}`, {
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Connected buttons: scroll to txpool`,
});
},
}}
button2={{
label: "Algorithmic Fund",
link: "/#algoFund",
label: "Exchange price stream",
speed: 6,
// link: "/#exchanges",
onClick: () => {
mixpanel.get_distinct_id() &&
mixpanel.track(`${MIXPANEL_EVENTS.BUTTON_CLICKED}`, {
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `scroll to AlgoFund`,
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Connected buttons: scroll to exchanges`,
});
},
}}
button3={{
label: "Developer",
link: "/#smartDeveloper",
label: "Social media posts",
speed: 3,
// link: "/#smartDeveloper",
onClick: () => {
mixpanel.get_distinct_id() &&
mixpanel.track(`${MIXPANEL_EVENTS.BUTTON_CLICKED}`, {
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `scroll to Developer`,
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Connected buttons: scroll to developer`,
});
},
}}
/>
</Flex>
</GridItem>
<GridItem
px="7%"
colSpan="12"
pt={["1rem", "1rem", "5.125rem", null, "5.125rem"]}
pb={["0", "66px", null, "66px"]}
id="cryptoTrader"
minH={ui.isMobileView ? "100vh" : null}
>
<SplitWithImage
cta={{
label: "I want early access!",
onClick: () => {
mixpanel.get_distinct_id() &&
mixpanel.track(`${MIXPANEL_EVENTS.BUTTON_CLICKED}`, {
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Early access CTA: Crypto trader`,
});
toggleModal("hubspot-trader");
},
}}
elementName={"element1"}
colorScheme="green"
badge={`For crypto traders`}
title={``}
body={``}
bullets={[
{
text: `Subscribe to the defi contracts you care about`,
icon: FaFileContract,
color: "green.50",
bgColor: "green.900",
},
{
text: `Make sense of how others are calling these contracts using Moonstream dashboards.
`,
icon: RiDashboardFill,
color: "green.50",
bgColor: "green.900",
},
{
text: `Get data directly from the transaction pool through our global network of Ethereum nodes`,
icon: GiMeshBall,
color: "green.50",
bgColor: "green.900",
},
]}
imgURL={assets["cryptoTraders"]}
/>
</GridItem>
<GridItem
px="7%"
colSpan="12"
pt={["1rem", "1rem", "5.125rem", null, "5.125rem"]}
pb={["0", "66px", null, "66px"]}
id="algoFund"
minH={ui.isMobileView ? "100vh" : null}
>
<SplitWithImage
cta={{
label: "I want early access!",
onClick: () => {
mixpanel.get_distinct_id() &&
mixpanel.track(`${MIXPANEL_EVENTS.BUTTON_CLICKED}`, {
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Early access CTA: Algo fund`,
});
toggleModal("hubspot-fund");
},
}}
elementName={"element2"}
mirror={true}
colorScheme="orange"
badge={`For algorithmic funds`}
bullets={[
{
text: `Get API access to your stream`,
icon: AiFillApi,
color: "orange.50",
bgColor: "orange.900",
},
{
text: `Set conditions that trigger predefined actions`,
icon: GiLogicGateXor,
color: "orange.50",
bgColor: "orange.900",
},
{
text: `Execute transactions directly on Moonstream nodes`,
icon: BiTransfer,
color: "orange.50",
bgColor: "orange.900",
},
]}
imgURL={assets["algorithmicFunds"]}
/>
</GridItem>
<GridItem
px="7%"
colSpan="12"
pt={["1rem", "1rem", "5.125rem", null, "5.125rem"]}
pb={["0", "66px", null, "66px"]}
id="smartDeveloper"
minH={ui.isMobileView ? "100vh" : null}
>
<SplitWithImage
cta={{
label: "I want early access!",
onClick: () => {
mixpanel.get_distinct_id() &&
mixpanel.track(`${MIXPANEL_EVENTS.BUTTON_CLICKED}`, {
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Early access CTA: developer`,
});
toggleModal("hubspot-developer");
},
}}
socialButton={{
url: "https://github.com/bugout-dev/moonstream/",
network: "github",
label: "See our github",
onClick: () => {
mixpanel.get_distinct_id() &&
mixpanel.track(`${MIXPANEL_EVENTS.BUTTON_CLICKED}`, {
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Github link in landing page`,
});
},
}}
elementName={"element3"}
colorScheme="blue"
badge={`For smart contract developers`}
bullets={[
{
text: `See how people use your smart contracts`,
icon: IoTelescopeSharp,
color: "blue.50",
bgColor: "blue.900",
},
{
text: `Set up alerts on suspicious activity`,
icon: GiSuspicious,
color: "blue.50",
bgColor: "blue.900",
},
{
text: `Register webhooks to connect your off-chain infrastructure`,
icon: GiHook,
color: "blue.50",
bgColor: "blue.900",
},
]}
imgURL={assets["smartDevelopers"]}
/>
</GridItem>
<GridItem
placeItems="center"
w="100%"
@ -564,24 +368,43 @@ const Homepage = () => {
pb="120px"
>
<Center>
<Button
as={Link}
isExternal
href={"https://discord.gg/K56VNUQGvA"}
size="lg"
variant="solid"
colorScheme="green"
id="test"
onClick={() => {
mixpanel.get_distinct_id() &&
mixpanel.track(`${MIXPANEL_EVENTS.BUTTON_CLICKED}`, {
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Join our discord`,
});
toggleModal("hubspot");
}}
>
Join our discord
</Button>
<Stack placeContent="center">
<Text fontWeight="500" fontSize="24px">
Want to find out more? Reach out to us on{" "}
<Link
color="orange.900"
onClick={() => {
mixpanel.get_distinct_id() &&
mixpanel.track(
`${MIXPANEL_EVENTS.BUTTON_CLICKED}`,
{
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Join our discord`,
}
);
}}
isExternal
href={"https://discord.gg/K56VNUQGvA"}
>
Discord
</Link>{" "}
or{" "}
<Link
color="orange.900"
onClick={() => {
mixpanel.get_distinct_id() &&
mixpanel.track(
`${MIXPANEL_EVENTS.BUTTON_CLICKED}`,
{
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Early access CTA: developer want to find more button`,
}
);
toggleModal("hubspot-developer");
}}
>
request early access
</Link>
</Text>
</Stack>
</Center>
</GridItem>
</Grid>


@ -1,26 +1,30 @@
import React, { useEffect, useState, useLayoutEffect } from "react";
import {
Heading,
Text,
Flex,
Link,
Stack,
chakra,
useMediaQuery,
useBreakpointValue,
Center,
} from "@chakra-ui/react";
import { DEFAULT_METATAGS, AWS_ASSETS_PATH } from "../../src/core/constants";
export async function getStaticProps() {
return {
props: { metaTags: { ...DEFAULT_METATAGS } },
};
}
import { AWS_ASSETS_PATH } from "../../src/core/constants";
import SplitWithImage from "../../src/components/SplitWithImage";
import mixpanel from "mixpanel-browser";
import {
MIXPANEL_PROPS,
MIXPANEL_EVENTS,
} from "../../src/core/providers/AnalyticsProvider/constants";
const assets = {
background720: `${AWS_ASSETS_PATH}/product-background-720x405.png`,
background1920: `${AWS_ASSETS_PATH}/product-background-720x405.png`,
background2880: `${AWS_ASSETS_PATH}/product-background-720x405.png`,
background3840: `${AWS_ASSETS_PATH}/product-background-720x405.png`,
environment: `${AWS_ASSETS_PATH}/product_comic/environment.png`,
developers: `${AWS_ASSETS_PATH}/product_comic/developers.png`,
meanwhile: `${AWS_ASSETS_PATH}/product_comic/meanwhile.png`,
struggle: `${AWS_ASSETS_PATH}/product_comic/struggle.png`,
solution: `${AWS_ASSETS_PATH}/product_comic/solution.png`,
};
const Product = () => {
@ -131,72 +135,115 @@ const Product = () => {
alignItems="center"
pb={24}
>
<Stack mx={margin} my={12} maxW="1700px" textAlign="justify">
<Heading
as="h2"
size="md"
placeSelf="center"
px={12}
py={2}
borderTopRadius="xl"
>
{`Why you'll love Moonstream`}
</Heading>
<chakra.span pl={2} px={12} py={2}>
<Text mb={3}>
We strive for financial inclusion. With cryptocurrencies becoming
mainstream, now is the time for anyone with a computer and access to
the Internet to utilize this opportunity to make passive income.
We're here to make it easier.
</Text>
<Text mb={3}>
Right now our source of data is Ethereum blockchain. Our goal is to
provide a live view of the transactions taking place on every public
blockchain - from the activity of specific accounts or smart
contracts to updates about general market movements.
</Text>
<Text mb={3}>
This information comes from the blockchains themselves, from their
mempools/transaction pools, and from centralized exchanges, social
media, and the news. This forms a stream of information tailored to
your specific needs.
</Text>
<Text mb={3}>
We're giving you a macro view of the crypto market with direct
access from Moonstream dashboards to execute transactions. You can
also set up programs which execute (on- or off-chain) when your
stream meets certain conditions.
</Text>
<Text mb={3}>
Moonstream is accessible through dashboard, API and webhooks.
</Text>
<Text mb={3}>
Moonstream's financial inclusion goes beyond providing access to
data. All of our work is open source as we do not believe that
proprietary technologies are financially inclusive.
</Text>
<Text mb={3}>
You can read{" "}
<Link
textColor="orange.900"
isExternal
href="https://github.com/bugout-dev/moonstream"
>
our code on GitHub.
</Link>{" "}
and keep track of our progress using{" "}
<Link
textColor="orange.900"
isExternal
href="https://github.com/bugout-dev/moonstream/milestones"
>
the Moonstream milestones
</Link>
.
</Text>
</chakra.span>
<Stack mx={margin} my={[4, 6, 12]} maxW="1700px" textAlign="justify">
<SplitWithImage
py={["12px", "24px", "48px"]}
title={`Smart contracts are starting to dominate blockchain activity`}
elementName={"element1"}
colorScheme="blue"
body={`web3 stands for decentralized automation through smart contracts.
Smart contract developers are building the future of the decentralized web.
`}
imgURL={assets["environment"]}
imgBoxShadow="lg"
/>
<SplitWithImage
mirror
py={["12px", "24px", "48px"]}
elementName={"element1"}
colorScheme="blue"
title={`But smart contract activity can be opaque`}
body={`Even smart contract developers have a difficult time finding out who is using their smart contracts and how.
This makes it difficult for them to improve their users' experience and to secure their decentralized applications.`}
imgURL={assets["developers"]}
imgBoxShadow="lg"
/>
<SplitWithImage
elementName={"element1"}
colorScheme="blue"
py={["12px", "24px", "48px"]}
title={`Blockchain explorers are not enough`}
body={`Today, analyzing smart contract activity involves viewing data in or crawling data from blockchain explorers.
The process is tedious and unreliable, and the data is difficult to interpret.
`}
imgURL={assets["struggle"]}
imgBoxShadow="lg"
/>
<SplitWithImage
mirror
elementName={"element1"}
py={["12px", "24px", "48px"]}
colorScheme="blue"
title={`Meanwhile, on Web 2.0`}
body={`Developers on the centralized web have access to tools like Google Analytics and Mixpanel.
They can instantly build dashboards to understand their user journeys and identify any issues that their users may be experiencing.
Nothing like this existed for the decentralized web until now.
`}
imgURL={assets["meanwhile"]}
imgBoxShadow="lg"
/>
<SplitWithImage
elementName={"element1"}
colorScheme="blue"
py={["12px", "24px", "48px"]}
title={`Meet Moonstream!`}
body={`Moonstream brings product analytics to web3.
Instantly get analytics for any smart contract you write.
We don't care which EIPs you implement and which ones you don't, or how custom your code is. Moonstream will immediately start giving you insights into what your users are doing with your contracts.
`}
imgURL={assets["solution"]}
imgBoxShadow="lg"
/>
<Center>
<Stack placeContent="center">
<Text fontWeight="500" fontSize="24px">
To find out more, join us on{" "}
<Link
color="orange.900"
onClick={() => {
mixpanel.get_distinct_id() &&
mixpanel.track(`${MIXPANEL_EVENTS.BUTTON_CLICKED}`, {
[`${MIXPANEL_PROPS.BUTTON_NAME}`]: `Join our discord`,
});
}}
isExternal
href={"https://discord.gg/K56VNUQGvA"}
>
Discord
</Link>{" "}
</Text>
</Stack>
</Center>
</Stack>
</Flex>
);
};
export async function getStaticProps() {
const metaTags = {
title: "Moonstream.to: web3 analytics",
description:
"Moonstream brings product analytics to web3. Instantly get analytics for any smart contract you write.",
keywords:
"blockchain, crypto, data, trading, smart contracts, ethereum, solana, transactions, defi, finance, decentralized, analytics, product",
url: "https://www.moonstream.to/product",
image: `${AWS_ASSETS_PATH}/product_comic/solution.png`,
};
const assetPreload = Object.keys(assets).map((key) => {
return {
rel: "preload",
href: assets[key],
as: "image",
};
});
const preconnects = [{ rel: "preconnect", href: "https://s3.amazonaws.com" }];
const preloads = assetPreload.concat(preconnects);
return {
props: { metaTags, preloads },
};
}
export default Product;


@ -8,8 +8,6 @@ import {
Stack,
ButtonGroup,
Spacer,
Radio,
RadioGroup,
UnorderedList,
ListItem,
Fade,
@ -28,7 +26,6 @@ import {
import StepProgress from "../src/components/StepProgress";
import { ArrowLeftIcon, ArrowRightIcon } from "@chakra-ui/icons";
import Scrollable from "../src/components/Scrollable";
import AnalyticsContext from "../src/core/providers/AnalyticsProvider/context";
import NewSubscription from "../src/components/NewSubscription";
import StreamEntry from "../src/components/StreamEntry";
import SubscriptionsList from "../src/components/SubscriptionsList";
@ -39,8 +36,6 @@ import { FaFilter } from "react-icons/fa";
const Welcome = () => {
const { subscriptionsCache } = useSubscriptions();
const ui = useContext(UIContext);
const { mixpanel, isLoaded, MIXPANEL_PROPS } = useContext(AnalyticsContext);
const [profile, setProfile] = React.useState();
const [showSubscriptionForm, setShowSubscriptionForm] = useBoolean(true);
useEffect(() => {
@ -53,14 +48,6 @@ const Welcome = () => {
ui.setOnboardingStep(index);
};
useEffect(() => {
if (profile && isLoaded) {
mixpanel.people.set({
[`${MIXPANEL_PROPS.USER_SPECIALITY}`]: profile,
});
}
}, [profile, MIXPANEL_PROPS, isLoaded, mixpanel]);
const SubscriptonCreatedCallback = () => {
setShowSubscriptionForm.off();
};
@ -250,48 +237,6 @@ const Welcome = () => {
</AccordionItem>
</Accordion>
</Stack>
<Stack
px={12}
// mt={24}
bgColor="gray.50"
borderRadius="xl"
boxShadow="xl"
py={4}
>
<Heading as="h4" size="md">
Tell us more about your needs
</Heading>
<Text fontWeight="semibold" pl={2}>
In order to create the best possible experience, we would love
to find out some more about you.
</Text>
<Text fontWeight="semibold" pl={2}>
Please tell us what profile describes you best.{" "}
<i>
This is purely analytical data, you can change it anytime
later.
</i>
</Text>
<RadioGroup
position="relative"
onChange={setProfile}
value={profile}
// fontWeight="bold"
colorScheme="orange"
// py={0}
// my={0}
>
<Stack
direction={["column", "row", null]}
justifyContent="space-evenly"
>
<Radio value="trader">I am trading crypto currency</Radio>
<Radio value="fund">I represent investment fund</Radio>
<Radio value="developer">I am developer</Radio>
</Stack>
</RadioGroup>
</Stack>
</Stack>
</Fade>
)}


@ -1,5 +1,13 @@
import React, { useEffect, useRef, useContext } from "react";
import { Flex, Heading, Button, Link, SimpleGrid } from "@chakra-ui/react";
import {
Flex,
Heading,
Button,
Link,
SimpleGrid,
useBreakpointValue,
useMediaQuery,
} from "@chakra-ui/react";
import Xarrow, { useXarrow } from "react-xarrows";
import UIContext from "../core/providers/UIProvider/context";
@ -9,6 +17,9 @@ const ArrowCTA = (props) => {
const box1Ref = useRef(null);
const box2Ref = useRef(null);
const box3Ref = useRef(null);
const box4Ref = useRef(null);
// const gridRow = props.button4 ? [5, 4, 2, null, 2] : [4, 3, 2, null, 2];
const updateXarrow = useXarrow();
@ -17,30 +28,72 @@ const ArrowCTA = (props) => {
// eslint-disable-next-line
}, [ui.isMobileView]);
const xarrowEntrySide = useBreakpointValue({
base: "top",
sm: "left",
md: "top",
lg: "top",
xl: "top",
"2xl": "top",
});
const [isLargerThan580px] = useMediaQuery(["(min-width: 580px)"]);
const buttonWidth = [
"190px",
isLargerThan580px ? "200px" : "140px",
"230px",
null,
"280px",
];
const fontSize = [
undefined,
isLargerThan580px ? undefined : "12px",
undefined,
null,
];
const speedConst = -0.05;
return (
<SimpleGrid
columns={[1, 2, 3, null, 3]}
columns={props.button4 ? [1, 2, 4, null, 4] : [1, 2, 3, null, 3]}
spacing={[10, 0, 10, null, 10]}
placeItems="center"
w="100%"
_after={{}}
>
<Flex
gridColumn={[1, 1, 2, null, 2]}
gridRow={[1, 2, 1, null, 1]}
gridColumn={
props.button4
? [1, 1, `2 / span 2`, null, "2 / span 2"]
: [1, 1, 2, null, 2]
}
// gridColumnStart={props.button4 ? [1, 2] : [0, 1]}
// gridColumnEnd={props.button4 ? [1, 4] : [0, 3]}
gridRow={
props.button4 ? [1, `2 / span 2`, 1, null, 1] : [1, 2, 1, null, 1]
}
// mb={14}
w={["180px", "180px", "250px", null, "250px"]}
// w={["180px", "180px", "250px", null, "250px"]}
w="100%"
// ml="16px"
placeSelf="center"
placeContent="center"
>
<Heading m={0} ref={box0Ref} fontSize={["lg", "lg", "lg", null, "lg"]}>
<Heading
m={0}
ref={box0Ref}
fontSize={["lg", isLargerThan580px ? "lg" : "sm", "lg", null, "lg"]}
>
{props.title}
</Heading>
</Flex>
<Button
as={props.button1.link && Link}
_hover={!props.button1.link && { cursor: "unset" }}
href={props.button1.link ?? null}
gridColumn={[1, 2, 1, null, 1]}
gridRow={[2, 1, 2, null, 2]}
@ -50,8 +103,9 @@ const ArrowCTA = (props) => {
variant="solid"
colorScheme="green"
className="MoonStockSpeciality element1"
w={["180px", "180px", "250px", null, "250px"]}
w={buttonWidth}
onClick={props.button1.onClick}
fontSize={fontSize}
>
{props.button1.label}
</Button>
@ -59,6 +113,7 @@ const ArrowCTA = (props) => {
<Button
as={props.button2.link && Link}
href={props.button2.link ?? null}
_hover={!props.button1.link && { cursor: "unset" }}
gridColumn={[1, 2, 2, null, 2]}
gridRow={[3, 2, 2, null, 2]}
zIndex={10}
@ -67,7 +122,8 @@ const ArrowCTA = (props) => {
variant="solid"
colorScheme="orange"
className="MoonStockSpeciality element2"
w={["180px", "180px", "250px", null, "250px"]}
w={buttonWidth}
fontSize={fontSize}
onClick={props.button2.onClick}
>
{props.button2.label}
@ -76,6 +132,7 @@ const ArrowCTA = (props) => {
<Button
as={props.button3.link && Link}
href={props.button3.link ?? null}
_hover={!props.button1.link && { cursor: "unset" }}
gridColumn={[1, 2, 3, null, 3]}
gridRow={[4, 3, 2, null, 2]}
zIndex={10}
@ -83,46 +140,83 @@ const ArrowCTA = (props) => {
boxShadow="md"
variant="solid"
colorScheme="blue"
w={["180px", "180px", "250px", null, "250px"]}
w={buttonWidth}
fontSize={fontSize}
onClick={props.button3.onClick}
>
{props.button3.label}
</Button>
{props.button4 && (
<Button
as={props.button4.link && Link}
href={props.button4.link ?? null}
_hover={!props.button1.link && { cursor: "unset" }}
gridColumn={[1, 2, 4, null, 4]}
gridRow={[5, 4, 2, null, 2]}
zIndex={10}
ref={box4Ref}
boxShadow="md"
variant="solid"
colorScheme="red"
w={buttonWidth}
fontSize={fontSize}
onClick={props.button4.onClick}
>
{props.button4.label}
</Button>
)}
<Xarrow
// showXarrow={!!box0Ref.current && !!box1Ref.current}
dashness={{
strokeLen: 10,
nonStrokeLen: 15,
animation: -2,
animation: props.speedBase * props.button1.speed,
}}
// animateDrawing={true}
color="#92D050"
startAnchor={xarrowEntrySide ?? "top"}
showHead={false}
start={box0Ref} //can be react ref
end={box1Ref} //or an id
start={box1Ref} //can be react ref
end={box0Ref} //or an id
/>
<Xarrow
dashness={{
strokeLen: 10,
nonStrokeLen: 15,
animation: -1,
animation: props.speedBase * props.button2.speed,
}}
color="#FD5602"
startAnchor={xarrowEntrySide ?? "top"}
showHead={false}
start={box0Ref} //can be react ref
end={box2Ref} //or an id
start={box2Ref} //can be react ref
end={box0Ref} //or an id
/>
<Xarrow
dashness={{
strokeLen: 10,
nonStrokeLen: 15,
animation: -4,
animation: props.speedBase * props.button3.speed,
}}
color="#212990"
startAnchor={xarrowEntrySide ?? "top"}
showHead={false}
start={box0Ref} //can be react ref
end={box3Ref} //or an id
start={box3Ref} //can be react ref
end={box0Ref} //or an id
/>
{props.button4 && (
<Xarrow
dashness={{
strokeLen: 10,
nonStrokeLen: 15,
animation: props.speedBase * props.button4.speed,
}}
color="#C53030"
startAnchor={xarrowEntrySide ?? "top"}
showHead={false}
start={box4Ref} //can be react ref
end={box0Ref} //or an id
/>
)}
</SimpleGrid>
);
};


@ -118,6 +118,14 @@ const Sidebar = () => {
>
Login
</MenuItem>
<MenuItem>
{" "}
<RouterLink href="/product">Product </RouterLink>
</MenuItem>
<MenuItem>
{" "}
<RouterLink href="/team">Team </RouterLink>
</MenuItem>
</Menu>
</SidebarContent>
)}


@ -68,6 +68,8 @@ const SplitWithImage = ({
elementName,
cta,
socialButton,
imgBoxShadow,
py,
}) => {
var buttonSize = useBreakpointValue({
base: { single: "sm", double: "xs" },
@ -94,10 +96,20 @@ const SplitWithImage = ({
return () => observer.unobserve(current);
}, []);
const themeColor = useColorModeValue(
`${colorScheme}.50`,
`${colorScheme}.900`
);
const bgThemeColor = useColorModeValue(
`${colorScheme}.900`,
`${colorScheme}.50`
);
return (
<Container
maxW={"7xl"}
py={0}
maxW={"100%"}
py={py}
className={`fade-in-section ${isVisible ? "is-visible" : ""}`}
ref={domRef}
>
@ -109,31 +121,34 @@ const SplitWithImage = ({
alt={"feature image"}
src={imgURL}
objectFit={"contain"}
boxShadow={imgBoxShadow ?? "inherit"}
/>
</Flex>
)}
<Stack spacing={4} justifyContent="center">
<Stack direction="row">
<Text
id={`MoonBadge ${elementName}`}
// id={`MoonBadge${elementName}`}
textTransform={"uppercase"}
color={useColorModeValue(
`${colorScheme}.50`,
`${colorScheme}.900`
)}
fontWeight={600}
fontSize={"sm"}
bg={useColorModeValue(`${colorScheme}.900`, `${colorScheme}.50`)}
p={2}
alignSelf={mirror && !ui.isMobileView ? "flex-end" : "flex-start"}
rounded={"md"}
<Stack spacing={[2, 4]} justifyContent="center">
{badge && (
<Stack
direction="row"
placeContent={
mirror && !ui.isMobileView ? "flex-end" : "flex-start"
}
>
{badge}
</Text>
</Stack>
<Heading>{title}</Heading>
<Text color={`blue.500`} fontSize={"lg"}>
<Text
id={`MoonBadge ${elementName}`}
textTransform={"uppercase"}
color={themeColor}
fontWeight={600}
fontSize={["xs", "sm"]}
bg={bgThemeColor}
p={[1, 2]}
rounded={"md"}
>
{badge}
</Text>
</Stack>
)}
<Heading size="md">{title}</Heading>
<Text color={`blue.500`} fontSize={["sm", "md", "lg"]}>
{body}
</Text>
<Stack
@ -163,17 +178,19 @@ const SplitWithImage = ({
flexWrap="nowrap"
display={["column", "column", null, "row"]}
>
<Button
colorScheme={colorScheme}
w={["100%", "100%", "fit-content", null]}
maxW={["250px", null, "fit-content"]}
variant="outline"
mt={[0, 0, null, 16]}
size={socialButton ? buttonSize.double : buttonSize.single}
onClick={cta.onClick}
>
{cta.label}
</Button>
{cta && (
<Button
colorScheme={colorScheme}
w={["100%", "100%", "fit-content", null]}
maxW={["250px", null, "fit-content"]}
variant="outline"
mt={[0, 0, null, 16]}
size={socialButton ? buttonSize.double : buttonSize.single}
onClick={cta.onClick}
>
{cta.label}
</Button>
)}
{socialButton && (
<RouteButton
@ -184,7 +201,7 @@ const SplitWithImage = ({
mt={[0, 0, null, 16]}
size={socialButton ? buttonSize.double : buttonSize.single}
variant="outline"
colorScheme="blue"
colorScheme={colorScheme}
leftIcon={<FaGithubSquare />}
>
git clone moonstream
@ -194,12 +211,14 @@ const SplitWithImage = ({
</Stack>
</Stack>
{(!mirror || ui.isMobileView) && (
<Flex justifyContent="center">
<Flex justifyContent="center" alignItems="center">
<Image
rounded={"md"}
alt={"feature image"}
src={imgURL}
objectFit={"contain"}
h="fit-content"
boxShadow={imgBoxShadow ?? "inherit"}
/>
</Flex>
)}


@ -11,3 +11,4 @@ export const queryCacheProps = {
return status === 404 || status === 403 ? false : true;
},
};
export default queryCacheProps;


@ -7,7 +7,7 @@ const Navbar = React.lazy(() => import("../components/Navbar"));
const RootLayout = (props) => {
const ui = useContext(UIContext);
const [showBanner, setShowBanner] = useState(true);
const [showBanner, setShowBanner] = useState(false);
return (
<Flex


@ -0,0 +1,25 @@
#!/usr/bin/env bash
# Deployment script - intended to run on Moonstream node control server
# Main
SCRIPT_DIR="$(realpath $(dirname $0))"
ETHEREUM_GETH_SERVICE="ethereum-node.service"
set -eu
echo
echo
echo "Deploy Geth service if not running already"
if systemctl is-active --quiet "${ETHEREUM_GETH_SERVICE}"
then
echo "Ethereum Geth service ${ETHEREUM_GETH_SERVICE} already running"
else
echo "Replacing Ethereum Geth service definition with ${ETHEREUM_GETH_SERVICE}"
chmod 644 "${SCRIPT_DIR}/${ETHEREUM_GETH_SERVICE}"
cp "${SCRIPT_DIR}/${ETHEREUM_GETH_SERVICE}" "/etc/systemd/system/${ETHEREUM_GETH_SERVICE}"
systemctl daemon-reload
systemctl disable "${ETHEREUM_GETH_SERVICE}"
systemctl restart "${ETHEREUM_GETH_SERVICE}"
sleep 10
fi


@ -0,0 +1,17 @@
[Unit]
Description=Ethereum node Geth client
After=network.target
[Service]
User=ubuntu
Group=www-data
ExecStart=/usr/bin/geth --syncmode snap --cache 4096 \
--port 41380 --datadir /mnt/disks/nodes/ethereum \
--txpool.globalslots 153600 --txpool.globalqueue 3072 \
--http --http.port 18370 --http.api eth,web3,txpool
ExecStop=/bin/kill -s SIGINT -$MAINPID
TimeoutStopSec=300
SyslogIdentifier=ethereum-node
[Install]
WantedBy=multi-user.target