kopia lustrzana https://github.com/bugout-dev/moonstream
Updated engineapi placement
rodzic
43a8e09d35
commit
3c240db2e1
|
@ -0,0 +1,174 @@
|
|||
|
||||
# Created by https://www.toptal.com/developers/gitignore/api/python,visualstudiocode
|
||||
# Edit at https://www.toptal.com/developers/gitignore?templates=python,visualstudiocode
|
||||
|
||||
### Python ###
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
### VisualStudioCode ###
|
||||
.vscode/*
|
||||
!.vscode/settings.json
|
||||
!.vscode/tasks.json
|
||||
!.vscode/launch.json
|
||||
!.vscode/extensions.json
|
||||
*.code-workspace
|
||||
|
||||
# Local History for Visual Studio Code
|
||||
.history/
|
||||
|
||||
### VisualStudioCode Patch ###
|
||||
# Ignore all local history of files
|
||||
.history
|
||||
.ionide
|
||||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/python,visualstudiocode
|
||||
|
||||
# Environments
|
||||
.engine/
|
||||
.engineapi/
|
||||
.venv/
|
||||
.env/
|
||||
|
||||
# Environment variables
|
||||
dev.env
|
||||
test.env
|
||||
prod.env
|
||||
engineapi.env
|
||||
.secrets/
|
|
@ -0,0 +1,3 @@
|
|||
[settings]
|
||||
profile = black
|
||||
multi_line_output = 3
|
|
@ -0,0 +1,43 @@
|
|||
# lootbox
|
||||
|
||||
Use lootboxes in your game economy with ready to use contracts
|
||||
|
||||
## Deployment
|
||||
|
||||
Deployment with local signer server
|
||||
|
||||
```bash
|
||||
MOONSTREAM_SIGNING_SERVER_IP=127.0.0.1 ./dev.sh
|
||||
```
|
||||
|
||||
## Run frontend
|
||||
|
||||
Run the following from the root of the workspace directory:
|
||||
|
||||
Engine:
|
||||
|
||||
Run dev
|
||||
|
||||
```
|
||||
yarn workspace engine run dev
|
||||
```
|
||||
|
||||
Build
|
||||
|
||||
```
|
||||
yarn workspace engine run build
|
||||
```
|
||||
|
||||
Player:
|
||||
|
||||
Run dev
|
||||
|
||||
```
|
||||
yarn workspace player run dev
|
||||
```
|
||||
|
||||
Build
|
||||
|
||||
```
|
||||
yarn workspace player run build
|
||||
```
|
|
@ -0,0 +1,102 @@
|
|||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = alembic
|
||||
|
||||
# template used to generate migration files
|
||||
# file_template = %%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the python-dateutil library that can be
|
||||
# installed by adding `alembic[tz]` to the pip requirements
|
||||
# string value is passed to dateutil.tz.gettz()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to alembic/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "version_path_separator" below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
|
||||
|
||||
# version path separator; As mentioned above, this is the character used to split
|
||||
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
||||
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
||||
# Valid values for version_path_separator are:
|
||||
#
|
||||
# version_path_separator = :
|
||||
# version_path_separator = ;
|
||||
# version_path_separator = space
|
||||
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
|
@ -0,0 +1,3 @@
|
|||
#!/usr/bin/env sh
|
||||
|
||||
PYTHONPATH=".:$PYTHONPATH" alembic "$@"
|
|
@ -0,0 +1 @@
|
|||
Generic single-database configuration.
|
|
@ -0,0 +1,87 @@
|
|||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config
|
||||
from sqlalchemy import pool
|
||||
|
||||
from alembic import context
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# from myapp import mymodel
|
||||
# target_metadata = mymodel.Base.metadata
|
||||
# from lootbox.models import Base as LootboxBase
|
||||
|
||||
from engineapi.models import Base as EngineBase
|
||||
|
||||
target_metadata = EngineBase.metadata
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a database URL — no Engine
    is created, so no DBAPI needs to be installed.  Calls to
    context.execute() emit the generated SQL to the script output
    instead of executing against a live connection.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
        version_table="alembic_version",
        # Keep the version table in the same schema as the models.
        version_table_schema=EngineBase.metadata.schema,
    )
    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_online():
    """Run migrations in 'online' mode.

    Builds an Engine from the ``[alembic]`` config section and runs the
    migrations over a live database connection.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # one-shot migration run; no pooling needed
    )
    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            version_table="alembic_version",
            # Keep the version table in the same schema as the models.
            version_table_schema=EngineBase.metadata.schema,
        )
        with context.begin_transaction():
            context.run_migrations()
|
||||
|
||||
|
||||
# Entry point: Alembic executes env.py directly and dispatches on the
# configured mode — offline emits SQL text, online runs against the DB.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
|
@ -0,0 +1,24 @@
|
|||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade():
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade():
|
||||
${downgrades if downgrades else "pass"}
|
|
@ -0,0 +1,83 @@
|
|||
"""Initial
|
||||
|
||||
Revision ID: 04e9f9125c90
|
||||
Revises:
|
||||
Create Date: 2022-04-21 19:29:31.599594
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '04e9f9125c90'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # dropper_contracts: registry of drop contracts, unique per
    # (blockchain, address).
    op.create_table('dropper_contracts',
    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('blockchain', sa.VARCHAR(length=128), nullable=False),
    sa.Column('address', sa.VARCHAR(length=256), nullable=True),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', statement_timestamp())"), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', statement_timestamp())"), nullable=False),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_dropper_contracts')),
    sa.UniqueConstraint('blockchain', 'address', name=op.f('uq_dropper_contracts_blockchain')),
    sa.UniqueConstraint('id', name=op.f('uq_dropper_contracts_id'))
    )
    op.create_index(op.f('ix_dropper_contracts_address'), 'dropper_contracts', ['address'], unique=False)
    # dropper_claims: per-contract claims; rows cascade-delete with
    # their parent dropper_contracts row.
    op.create_table('dropper_claims',
    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('dropper_contract_id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('claim_id', sa.BigInteger(), nullable=True),
    sa.Column('title', sa.VARCHAR(length=128), nullable=True),
    sa.Column('description', sa.String(), nullable=True),
    sa.Column('terminus_address', sa.VARCHAR(length=256), nullable=False),
    sa.Column('terminus_pool_id', sa.BigInteger(), nullable=False),
    sa.Column('claim_block_deadline', sa.BigInteger(), nullable=True),
    sa.Column('active', sa.Boolean(), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', statement_timestamp())"), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', statement_timestamp())"), nullable=False),
    sa.ForeignKeyConstraint(['dropper_contract_id'], ['dropper_contracts.id'], name=op.f('fk_dropper_claims_dropper_contract_id_dropper_contracts'), ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_dropper_claims')),
    sa.UniqueConstraint('id', name=op.f('uq_dropper_claims_id'))
    )
    op.create_index(op.f('ix_dropper_claims_terminus_address'), 'dropper_claims', ['terminus_address'], unique=False)
    op.create_index(op.f('ix_dropper_claims_terminus_pool_id'), 'dropper_claims', ['terminus_pool_id'], unique=False)
    # dropper_claimants: addresses entitled to a claim and their
    # amounts; rows cascade-delete with their parent dropper_claims row.
    op.create_table('dropper_claimants',
    sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('dropper_claim_id', postgresql.UUID(as_uuid=True), nullable=False),
    sa.Column('address', sa.VARCHAR(length=256), nullable=False),
    sa.Column('amount', sa.BigInteger(), nullable=False),
    sa.Column('added_by', sa.VARCHAR(length=256), nullable=False),
    sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', statement_timestamp())"), nullable=False),
    sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text("TIMEZONE('utc', statement_timestamp())"), nullable=False),
    sa.ForeignKeyConstraint(['dropper_claim_id'], ['dropper_claims.id'], name=op.f('fk_dropper_claimants_dropper_claim_id_dropper_claims'), ondelete='CASCADE'),
    sa.PrimaryKeyConstraint('id', name=op.f('pk_dropper_claimants')),
    sa.UniqueConstraint('id', name=op.f('uq_dropper_claimants_id'))
    )
    op.create_index(op.f('ix_dropper_claimants_added_by'), 'dropper_claimants', ['added_by'], unique=False)
    op.create_index(op.f('ix_dropper_claimants_address'), 'dropper_claimants', ['address'], unique=False)
    # ### end Alembic commands ###

    # Manual: partial unique index — at most one *active* claim per
    # (dropper_contract_id, claim_id); rows with NULL claim_id or
    # active = false are exempt, which plain UNIQUE cannot express.
    op.execute("CREATE UNIQUE INDEX uq_dropper_claims_dropper_contract_id_claim_id ON dropper_claims(dropper_contract_id,claim_id) WHERE (claim_id is NOT NULL and active = true);")
|
||||
|
||||
|
||||
def downgrade():
    # Manual: drop the partial unique index FIRST — it lives on
    # dropper_claims, which is dropped below.  In the original ordering
    # this DROP INDEX ran after op.drop_table('dropper_claims');
    # PostgreSQL drops a table's indexes together with the table, so the
    # later DROP INDEX failed with "index does not exist".
    op.execute("DROP INDEX uq_dropper_claims_dropper_contract_id_claim_id")

    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse of upgrade(): drop children before parents.
    op.drop_index(op.f('ix_dropper_claimants_address'), table_name='dropper_claimants')
    op.drop_index(op.f('ix_dropper_claimants_added_by'), table_name='dropper_claimants')
    op.drop_table('dropper_claimants')
    op.drop_index(op.f('ix_dropper_claims_terminus_pool_id'), table_name='dropper_claims')
    op.drop_index(op.f('ix_dropper_claims_terminus_address'), table_name='dropper_claims')
    op.drop_table('dropper_claims')
    op.drop_index(op.f('ix_dropper_contracts_address'), table_name='dropper_contracts')
    op.drop_table('dropper_contracts')
    # ### end Alembic commands ###
|
|
@ -0,0 +1,80 @@
|
|||
"""Unique constraints, contract metadata
|
||||
|
||||
Revision ID: 3f2ec6253b7e
|
||||
Revises: 04e9f9125c90
|
||||
Create Date: 2022-04-26 04:53:05.221128
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "3f2ec6253b7e"
|
||||
down_revision = "04e9f9125c90"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_unique_constraint(
|
||||
op.f("uq_dropper_claimants_dropper_claim_id"),
|
||||
"dropper_claimants",
|
||||
["dropper_claim_id", "address"],
|
||||
)
|
||||
op.create_unique_constraint(
|
||||
op.f("uq_dropper_claimants_id"), "dropper_claimants", ["id"]
|
||||
)
|
||||
op.alter_column(
|
||||
"dropper_claims",
|
||||
"terminus_address",
|
||||
existing_type=sa.VARCHAR(length=256),
|
||||
nullable=True,
|
||||
)
|
||||
op.alter_column(
|
||||
"dropper_claims", "terminus_pool_id", existing_type=sa.BIGINT(), nullable=True
|
||||
)
|
||||
op.create_unique_constraint(op.f("uq_dropper_claims_id"), "dropper_claims", ["id"])
|
||||
op.add_column(
|
||||
"dropper_contracts", sa.Column("title", sa.VARCHAR(length=128), nullable=True)
|
||||
)
|
||||
op.add_column(
|
||||
"dropper_contracts", sa.Column("description", sa.String(), nullable=True)
|
||||
)
|
||||
op.add_column(
|
||||
"dropper_contracts", sa.Column("image_uri", sa.String(), nullable=True)
|
||||
)
|
||||
op.create_unique_constraint(
|
||||
op.f("uq_dropper_contracts_id"), "dropper_contracts", ["id"]
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_constraint(
|
||||
op.f("uq_dropper_contracts_id"), "dropper_contracts", type_="unique"
|
||||
)
|
||||
op.drop_column("dropper_contracts", "image_uri")
|
||||
op.drop_column("dropper_contracts", "description")
|
||||
op.drop_column("dropper_contracts", "title")
|
||||
op.drop_constraint(op.f("uq_dropper_claims_id"), "dropper_claims", type_="unique")
|
||||
op.alter_column(
|
||||
"dropper_claims", "terminus_pool_id", existing_type=sa.BIGINT(), nullable=False
|
||||
)
|
||||
op.alter_column(
|
||||
"dropper_claims",
|
||||
"terminus_address",
|
||||
existing_type=sa.VARCHAR(length=256),
|
||||
nullable=False,
|
||||
)
|
||||
op.drop_constraint(
|
||||
op.f("uq_dropper_claimants_id"), "dropper_claimants", type_="unique"
|
||||
)
|
||||
op.drop_constraint(
|
||||
op.f("uq_dropper_claimants_dropper_claim_id"),
|
||||
"dropper_claimants",
|
||||
type_="unique",
|
||||
)
|
||||
# ### end Alembic commands ###
|
|
@ -0,0 +1,92 @@
|
|||
"""Add leaderboard table add leaderboardscores
|
||||
|
||||
Revision ID: 6b45cfe1799c
|
||||
Revises: 3f2ec6253b7e
|
||||
Create Date: 2022-05-19 21:09:02.690868
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "6b45cfe1799c"
|
||||
down_revision = "3f2ec6253b7e"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table(
|
||||
"leaderboards",
|
||||
sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column("title", sa.VARCHAR(length=128), nullable=False),
|
||||
sa.Column("description", sa.String(), nullable=True),
|
||||
sa.Column(
|
||||
"created_at",
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text("TIMEZONE('utc', statement_timestamp())"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.Column(
|
||||
"updated_at",
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text("TIMEZONE('utc', statement_timestamp())"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("pk_leaderboards")),
|
||||
sa.UniqueConstraint("id", name=op.f("uq_leaderboards_id")),
|
||||
)
|
||||
op.create_table(
|
||||
"leaderboard_scores",
|
||||
sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column("leaderboard_id", postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column("address", sa.VARCHAR(length=256), nullable=False),
|
||||
sa.Column("score", sa.BigInteger(), nullable=False),
|
||||
sa.Column(
|
||||
"points_data", postgresql.JSONB(astext_type=sa.Text()), nullable=True
|
||||
),
|
||||
sa.Column(
|
||||
"created_at",
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text("TIMEZONE('utc', statement_timestamp())"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.Column(
|
||||
"updated_at",
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text("TIMEZONE('utc', statement_timestamp())"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
["leaderboard_id"],
|
||||
["leaderboards.id"],
|
||||
name=op.f("fk_leaderboard_scores_leaderboard_id_leaderboards"),
|
||||
ondelete="CASCADE",
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("pk_leaderboard_scores")),
|
||||
sa.UniqueConstraint("id", name=op.f("uq_leaderboard_scores_id")),
|
||||
sa.UniqueConstraint(
|
||||
"leaderboard_id",
|
||||
"address",
|
||||
name=op.f("uq_leaderboard_scores_leaderboard_id"),
|
||||
),
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_leaderboard_scores_address"),
|
||||
"leaderboard_scores",
|
||||
["address"],
|
||||
unique=False,
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(
|
||||
op.f("ix_leaderboard_scores_address"), table_name="leaderboard_scores"
|
||||
)
|
||||
op.drop_table("leaderboard_scores")
|
||||
op.drop_table("leaderboards")
|
||||
# ### end Alembic commands ###
|
|
@ -0,0 +1,38 @@
|
|||
"""add resource_id column
|
||||
|
||||
Revision ID: 782ac8fe23c8
|
||||
Revises: 815ae0983ef1
|
||||
Create Date: 2022-11-10 13:47:49.486491
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "782ac8fe23c8"
|
||||
down_revision = "815ae0983ef1"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Link leaderboards to an external resource; nullable so existing
    # rows remain valid without a backfill.
    op.add_column(
        "leaderboards",
        sa.Column("resource_id", postgresql.UUID(as_uuid=True), nullable=True),
    )
    op.create_index(
        op.f("ix_leaderboards_resource_id"),
        "leaderboards",
        ["resource_id"],
        unique=False,
    )
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse of upgrade(): index first, then the column.
    op.drop_index(op.f("ix_leaderboards_resource_id"), table_name="leaderboards")
    op.drop_column("leaderboards", "resource_id")
    # ### end Alembic commands ###
|
|
@ -0,0 +1,28 @@
|
|||
"""Add raw_amount column
|
||||
|
||||
Revision ID: 815ae0983ef1
|
||||
Revises: f0e8022dc814
|
||||
Create Date: 2022-06-08 12:39:35.846110
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "815ae0983ef1"
|
||||
down_revision = "f0e8022dc814"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # raw_amount stored as String — presumably to hold uint256 token
    # amounts that overflow BIGINT; TODO confirm against callers.
    # Nullable so existing claimant rows remain valid.
    op.add_column(
        "dropper_claimants", sa.Column("raw_amount", sa.String(), nullable=True)
    )
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Reverse of upgrade(): remove the raw_amount column.
    op.drop_column("dropper_claimants", "raw_amount")
    # ### end Alembic commands ###
|
|
@ -0,0 +1,154 @@
|
|||
"""registered_contracts and call_requests
|
||||
|
||||
Revision ID: d1be5f227664
|
||||
Revises: 782ac8fe23c8
|
||||
Create Date: 2023-04-10 06:37:44.812202
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "d1be5f227664"
|
||||
down_revision = "782ac8fe23c8"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.create_table(
|
||||
"registered_contracts",
|
||||
sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column("blockchain", sa.VARCHAR(length=128), nullable=False),
|
||||
sa.Column("address", sa.VARCHAR(length=256), nullable=False),
|
||||
sa.Column("contract_type", sa.VARCHAR(length=128), nullable=False),
|
||||
sa.Column("title", sa.VARCHAR(length=128), nullable=False),
|
||||
sa.Column("description", sa.String(), nullable=True),
|
||||
sa.Column("image_uri", sa.String(), nullable=True),
|
||||
sa.Column("moonstream_user_id", postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column(
|
||||
"created_at",
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text("TIMEZONE('utc', statement_timestamp())"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.Column(
|
||||
"updated_at",
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text("TIMEZONE('utc', statement_timestamp())"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("pk_registered_contracts")),
|
||||
sa.UniqueConstraint(
|
||||
"blockchain",
|
||||
"address",
|
||||
"contract_type",
|
||||
name=op.f("uq_registered_contracts_blockchain"),
|
||||
),
|
||||
sa.UniqueConstraint("id", name=op.f("uq_registered_contracts_id")),
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_registered_contracts_address"),
|
||||
"registered_contracts",
|
||||
["address"],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_registered_contracts_blockchain"),
|
||||
"registered_contracts",
|
||||
["blockchain"],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_registered_contracts_contract_type"),
|
||||
"registered_contracts",
|
||||
["contract_type"],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_registered_contracts_moonstream_user_id"),
|
||||
"registered_contracts",
|
||||
["moonstream_user_id"],
|
||||
unique=False,
|
||||
)
|
||||
op.create_table(
|
||||
"call_requests",
|
||||
sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column(
|
||||
"registered_contract_id", postgresql.UUID(as_uuid=True), nullable=False
|
||||
),
|
||||
sa.Column("caller", sa.VARCHAR(length=256), nullable=False),
|
||||
sa.Column("moonstream_user_id", postgresql.UUID(as_uuid=True), nullable=False),
|
||||
sa.Column("method", sa.String(), nullable=False),
|
||||
sa.Column(
|
||||
"parameters", postgresql.JSONB(astext_type=sa.Text()), nullable=False
|
||||
),
|
||||
sa.Column("expires_at", sa.DateTime(timezone=True), nullable=True),
|
||||
sa.Column(
|
||||
"created_at",
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text("TIMEZONE('utc', statement_timestamp())"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.Column(
|
||||
"updated_at",
|
||||
sa.DateTime(timezone=True),
|
||||
server_default=sa.text("TIMEZONE('utc', statement_timestamp())"),
|
||||
nullable=False,
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
["registered_contract_id"],
|
||||
["registered_contracts.id"],
|
||||
name=op.f("fk_call_requests_registered_contract_id_registered_contracts"),
|
||||
ondelete="CASCADE",
|
||||
),
|
||||
sa.PrimaryKeyConstraint("id", name=op.f("pk_call_requests")),
|
||||
sa.UniqueConstraint("id", name=op.f("uq_call_requests_id")),
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_call_requests_caller"), "call_requests", ["caller"], unique=False
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_call_requests_expires_at"),
|
||||
"call_requests",
|
||||
["expires_at"],
|
||||
unique=False,
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_call_requests_method"), "call_requests", ["method"], unique=False
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_call_requests_moonstream_user_id"),
|
||||
"call_requests",
|
||||
["moonstream_user_id"],
|
||||
unique=False,
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(
|
||||
op.f("ix_call_requests_moonstream_user_id"), table_name="call_requests"
|
||||
)
|
||||
op.drop_index(op.f("ix_call_requests_method"), table_name="call_requests")
|
||||
op.drop_index(op.f("ix_call_requests_expires_at"), table_name="call_requests")
|
||||
op.drop_index(op.f("ix_call_requests_caller"), table_name="call_requests")
|
||||
op.drop_table("call_requests")
|
||||
op.drop_index(
|
||||
op.f("ix_registered_contracts_moonstream_user_id"),
|
||||
table_name="registered_contracts",
|
||||
)
|
||||
op.drop_index(
|
||||
op.f("ix_registered_contracts_contract_type"), table_name="registered_contracts"
|
||||
)
|
||||
op.drop_index(
|
||||
op.f("ix_registered_contracts_blockchain"), table_name="registered_contracts"
|
||||
)
|
||||
op.drop_index(
|
||||
op.f("ix_registered_contracts_address"), table_name="registered_contracts"
|
||||
)
|
||||
op.drop_table("registered_contracts")
|
||||
# ### end Alembic commands ###
|
|
@ -0,0 +1,44 @@
|
|||
"""Fix unique constract on registered_contracts to include moonstream_user_id
|
||||
|
||||
Revision ID: dedd8a7d0624
|
||||
Revises: d1be5f227664
|
||||
Create Date: 2023-05-02 15:52:36.654980
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "dedd8a7d0624"
|
||||
down_revision = "d1be5f227664"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Rebuild the uniqueness constraint to also include
    # moonstream_user_id, so different users may register the same
    # (blockchain, address, contract_type) contract independently.
    op.drop_constraint(
        "uq_registered_contracts_blockchain", "registered_contracts", type_="unique"
    )
    op.create_unique_constraint(
        op.f("uq_registered_contracts_blockchain"),
        "registered_contracts",
        ["blockchain", "moonstream_user_id", "address", "contract_type"],
    )
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    # Restore the original constraint without moonstream_user_id.
    # NOTE(review): this will fail if rows now exist that are only
    # unique per-user — duplicates must be resolved before downgrading.
    op.drop_constraint(
        op.f("uq_registered_contracts_blockchain"),
        "registered_contracts",
        type_="unique",
    )
    op.create_unique_constraint(
        "uq_registered_contracts_blockchain",
        "registered_contracts",
        ["blockchain", "address", "contract_type"],
    )
    # ### end Alembic commands ###
|
|
@ -0,0 +1,39 @@
|
|||
"""Added column for signatures
|
||||
|
||||
Revision ID: f0e8022dc814
|
||||
Revises: 6b45cfe1799c
|
||||
Create Date: 2022-05-24 14:19:19.022226
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "f0e8022dc814"
|
||||
down_revision = "6b45cfe1799c"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.add_column(
|
||||
"dropper_claimants", sa.Column("signature", sa.String(), nullable=True)
|
||||
)
|
||||
op.create_index(
|
||||
op.f("ix_dropper_claimants_signature"),
|
||||
"dropper_claimants",
|
||||
["signature"],
|
||||
unique=False,
|
||||
)
|
||||
# ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade():
|
||||
# ### commands auto generated by Alembic - please adjust! ###
|
||||
op.drop_index(
|
||||
op.f("ix_dropper_claimants_signature"), table_name="dropper_claimants"
|
||||
)
|
||||
op.drop_column("dropper_claimants", "signature")
|
||||
# ### end Alembic commands ###
|
|
@ -0,0 +1,72 @@
|
|||
#!/usr/bin/env bash

# Deployment script
# Installs Python dependencies, pulls runtime secrets from AWS SSM (via
# checkenv), and replaces/restarts the Engine API systemd user service.

# Colors
C_RESET='\033[0m'
C_RED='\033[1;31m'
C_GREEN='\033[1;32m'
C_YELLOW='\033[1;33m'

# Logs
PREFIX_INFO="${C_GREEN}[INFO]${C_RESET} [$(date +%d-%m\ %T)]"
PREFIX_WARN="${C_YELLOW}[WARN]${C_RESET} [$(date +%d-%m\ %T)]"
PREFIX_CRIT="${C_RED}[CRIT]${C_RESET} [$(date +%d-%m\ %T)]"

# Main
APP_DIR="${APP_DIR:-/home/ubuntu/api/engineapi}"
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION:-us-east-1}"
PYTHON_ENV_DIR="${PYTHON_ENV_DIR:-/home/ubuntu/engine-env}"
PYTHON="${PYTHON_ENV_DIR}/bin/python"
PIP="${PYTHON_ENV_DIR}/bin/pip"
SCRIPT_DIR="$(realpath $(dirname $0))"
SECRETS_DIR="${SECRETS_DIR:-/home/ubuntu/engine-secrets}"
PARAMETERS_ENV_PATH="${SECRETS_DIR}/app.env"

# API server service file
ENGINE_SERVICE_FILE="engine.service"

# Fail fast on any error or unset variable from here on.
set -eu

echo
echo
echo -e "${PREFIX_INFO} Upgrading Python pip and setuptools"
"${PIP}" install --upgrade pip setuptools

echo
echo
echo -e "${PREFIX_INFO} Installing Python dependencies"
"${PIP}" install --exists-action i -r "${APP_DIR}/requirements.txt"

echo
echo
echo -e "${PREFIX_INFO} Install checkenv"
HOME=/home/ubuntu /usr/local/go/bin/go install github.com/bugout-dev/checkenv@latest

echo
echo
echo -e "${PREFIX_INFO} Retrieving deployment parameters"
if [ ! -d "${SECRETS_DIR}" ]; then
  mkdir "${SECRETS_DIR}"
  echo -e "${PREFIX_WARN} Created new secrets directory"
fi
# Export every aws_ssm parameter tagged engine:true into the env file.
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION}" /home/ubuntu/go/bin/checkenv show aws_ssm+engine:true > "${PARAMETERS_ENV_PATH}"
chmod 0640 "${PARAMETERS_ENV_PATH}"

echo
echo
echo -e "${PREFIX_INFO} Add AWS default region to parameters"
echo "AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION}" >> "${PARAMETERS_ENV_PATH}"

echo
echo
echo -e "${PREFIX_INFO} Add instance local IP to parameters"
echo "AWS_LOCAL_IPV4=$(ec2metadata --local-ipv4)" >> "${PARAMETERS_ENV_PATH}"

echo
echo
echo -e "${PREFIX_INFO} Replacing existing Engine API server service definition with ${ENGINE_SERVICE_FILE}"
chmod 644 "${SCRIPT_DIR}/${ENGINE_SERVICE_FILE}"
cp "${SCRIPT_DIR}/${ENGINE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ENGINE_SERVICE_FILE}"
# Run systemctl against the ubuntu (uid 1000) user session bus.
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ENGINE_SERVICE_FILE}"
|
|
@ -0,0 +1,16 @@
|
|||
# systemd user service for the Moonstream Engine API server.
# Installed by the deployment script; environment comes from engine-secrets/app.env.
[Unit]
Description=Engine API service
After=network.target
StartLimitIntervalSec=300
StartLimitBurst=3

[Service]
WorkingDirectory=/home/ubuntu/api/engineapi
EnvironmentFile=/home/ubuntu/engine-secrets/app.env
Restart=on-failure
RestartSec=15s
ExecStart=/home/ubuntu/engine-env/bin/uvicorn --proxy-headers --forwarded-allow-ips='127.0.0.1' --host 127.0.0.1 --port 7191 --workers 8 engineapi.api:app
SyslogIdentifier=engine

[Install]
WantedBy=multi-user.target
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env sh

# Development server runner for the Engine API.
# Expects access to Python environment with the requirements
# for this project installed.
set -e

# Host, port, and worker count are overridable via the environment.
ENGINE_HOST="${ENGINE_HOST:-127.0.0.1}"
ENGINE_PORT="${ENGINE_PORT:-7191}"
ENGINE_WORKERS="${ENGINE_WORKERS:-1}"

uvicorn --port "$ENGINE_PORT" --host "$ENGINE_HOST" --workers "$ENGINE_WORKERS" engineapi.api:app
|
|
@ -0,0 +1,78 @@
|
|||
"""
|
||||
ABI utilities, because web3 doesn't do selectors well.
|
||||
"""
|
||||
|
||||
import glob
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from web3 import Web3
|
||||
|
||||
|
||||
def abi_input_signature(input_abi: Dict[str, Any]) -> str:
    """
    Render a single ABI input entry as its canonical type string, following
    the Solidity ABI specification:
    https://docs.soliditylang.org/en/v0.5.3/abi-spec.html
    """
    type_string = input_abi["type"]
    if not type_string.startswith("tuple"):
        return type_string
    # Tuples become a parenthesized, comma-joined list of component
    # signatures, keeping any array suffix (e.g. "[]", "[3]") at the end.
    inner = ",".join(
        abi_input_signature(component) for component in input_abi["components"]
    )
    suffix = type_string[len("tuple"):]
    return f"({inner}){suffix}"
|
||||
|
||||
|
||||
def abi_function_signature(function_abi: Dict[str, Any]) -> str:
    """
    Render a function ABI entry as its canonical signature string, e.g.
    "transfer(address,uint256)", per the Solidity ABI specification:
    https://docs.soliditylang.org/en/v0.5.3/abi-spec.html
    """
    rendered_args = ",".join(
        abi_input_signature(input_item) for input_item in function_abi["inputs"]
    )
    return f"{function_abi['name']}({rendered_args})"
|
||||
|
||||
|
||||
def encode_function_signature(function_abi: Dict[str, Any]) -> Optional[str]:
    """
    Compute the 4-byte selector for the given function ABI entry:
    keccak256("<function_name>(<arg_1_type>,...,<arg_n_type>)")[:4]

    Returns None when the ABI item is not a function (i.e. its "type" field
    is not "function").
    """
    if function_abi["type"] != "function":
        return None
    signature = abi_function_signature(function_abi)
    selector_bytes = Web3.keccak(text=signature)[:4]
    return selector_bytes.hex()
|
||||
|
||||
|
||||
def project_abis(project_dir: str) -> Dict[str, List[Dict[str, Any]]]:
    """
    Load the ABI of every contract artifact in a brownie project and return
    them in a dictionary keyed by contract name.

    Inputs:
    - project_dir
        Path to brownie project
    """
    # Brownie writes one JSON artifact per contract under build/contracts.
    artifact_paths = glob.glob(
        os.path.join(project_dir, "build", "contracts", "*.json")
    )

    abis: Dict[str, List[Dict[str, Any]]] = {}
    for artifact_path in artifact_paths:
        contract_name, _ = os.path.splitext(os.path.basename(artifact_path))
        with open(artifact_path, "r") as ifp:
            artifact = json.load(ifp)
        # Artifacts without an "abi" key map to an empty ABI.
        abis[contract_name] = artifact.get("abi", [])

    return abis
|
Diff file is too large
Load Diff
|
@ -0,0 +1,65 @@
|
|||
"""
|
||||
Lootbox API.
|
||||
"""
|
||||
import logging
|
||||
import time
|
||||
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
|
||||
from . import data
|
||||
from .settings import (
|
||||
ORIGINS,
|
||||
)
|
||||
from .routes.dropper import app as dropper_app
|
||||
from .routes.leaderboard import app as leaderboard_app
|
||||
from .routes.admin import app as admin_app
|
||||
from .routes.play import app as play_app
|
||||
from .routes.metatx import app as metatx_app
|
||||
from .version import VERSION
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
tags_metadata = [{"name": "time", "description": "Server timestamp endpoints."}]
|
||||
|
||||
|
||||
app = FastAPI(
|
||||
title=f"Engine HTTP API",
|
||||
description="Engine API endpoints.",
|
||||
version=VERSION,
|
||||
openapi_tags=tags_metadata,
|
||||
openapi_url="/openapi.json",
|
||||
docs_url=None,
|
||||
)
|
||||
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=ORIGINS,
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
|
||||
@app.get("/ping", response_model=data.PingResponse)
|
||||
async def ping_handler() -> data.PingResponse:
|
||||
"""
|
||||
Check server status.
|
||||
"""
|
||||
return data.PingResponse(status="ok")
|
||||
|
||||
|
||||
@app.get("/now", tags=["time"])
|
||||
async def now_handler() -> data.NowResponse:
|
||||
"""
|
||||
Get server current time.
|
||||
"""
|
||||
return data.NowResponse(epoch_time=time.time())
|
||||
|
||||
|
||||
app.mount("/leaderboard", leaderboard_app)
|
||||
app.mount("/drops", dropper_app)
|
||||
app.mount("/admin", admin_app)
|
||||
app.mount("/play", play_app)
|
||||
app.mount("/metatx", metatx_app)
|
|
@ -0,0 +1,180 @@
|
|||
"""
|
||||
Login functionality for Moonstream Engine.
|
||||
|
||||
Login flow relies on an Authorization header passed to Moonstream Engine of the form:
|
||||
Authorization: moonstream <base64-encoded JSON>
|
||||
|
||||
The schema for the JSON object will be as follows:
|
||||
{
|
||||
"address": "<address of account which signed the message>",
|
||||
"deadline": <epoch timestamp after which this header becomes invalid>,
|
||||
"signature": "<signed authorization message>"
|
||||
}
|
||||
|
||||
Authorization messages will be generated pursuant to EIP712 using the following parameters:
|
||||
Domain separator - name: MoonstreamAuthorization, version: <Engine API version>
|
||||
Fields - address ("address" type), deadline: ("uint256" type)
|
||||
"""
|
||||
import argparse
import base64
import json
import time
from typing import Any, cast, Dict, Tuple

from eip712.messages import EIP712Message, _hash_eip191_message
from eth_account import Account
from eth_account._utils.signing import sign_message_hash
import eth_keys
from hexbytes import HexBytes
from web3 import Web3
||||
|
||||
|
||||
# EIP712 domain separator parameters for authorization messages.
AUTH_PAYLOAD_NAME = "MoonstreamAuthorization"
AUTH_VERSION = "1"

# By default, authorizations will remain active for 24 hours.
DEFAULT_INTERVAL = 60 * 60 * 24


class MoonstreamAuthorizationVerificationError(Exception):
    """
    Raised when invalid signer is provided.
    """


class MoonstreamAuthorizationExpired(Exception):
    """
    Raised when signature is expired by time.
    """


class MoonstreamAuthorization(EIP712Message):
    """
    EIP712 message schema for Moonstream authorization headers.

    NOTE: the string annotations below are EIP712 field types consumed by
    the eip712 library, not Python type hints.
    """

    _name_: "string"
    _version_: "string"

    address: "address"
    deadline: "uint256"
|
||||
|
||||
|
||||
def sign_message(message_hash_bytes: HexBytes, private_key: HexBytes) -> HexBytes:
    """
    Sign the given message hash with the given private key and return the
    serialized signature bytes.
    """
    signer_key = eth_keys.keys.PrivateKey(private_key)
    # sign_message_hash returns (v, r, s, serialized_signature); only the
    # serialized signature is needed here.
    _, _, _, signature_bytes = sign_message_hash(signer_key, message_hash_bytes)
    return signature_bytes
|
||||
|
||||
|
||||
def authorize(deadline: int, address: str, private_key: HexBytes) -> Dict[str, Any]:
    """
    Build a Moonstream authorization payload: an EIP712 MoonstreamAuthorization
    message over (address, deadline), signed with the given private key.

    Returns the dictionary expected by the API's Authorization header (after
    base64-encoding its JSON serialization).
    """
    eip712_message = MoonstreamAuthorization(
        _name_=AUTH_PAYLOAD_NAME,
        _version_=AUTH_VERSION,
        address=address,
        deadline=deadline,
    )

    # Hash per EIP-191 ("\x19\x01" prefix) and sign the digest.
    digest = HexBytes(_hash_eip191_message(eip712_message.signable_message))
    signature = sign_message(digest, private_key)

    return {
        "address": address,
        "deadline": deadline,
        "signed_message": signature.hex(),
    }
|
||||
|
||||
|
||||
def verify(authorization_payload: Dict[str, Any]) -> bool:
    """
    Verify that the payload's signature was produced by the claimed address
    and that its deadline has not passed.

    Raises MoonstreamAuthorizationVerificationError on a signer mismatch and
    MoonstreamAuthorizationExpired when the deadline is in the past.
    Returns True on success.
    """
    now = int(time.time())
    web3_client = Web3()

    claimed_address = Web3.toChecksumAddress(
        cast(str, authorization_payload["address"])
    )
    deadline = cast(int, authorization_payload["deadline"])
    signature = cast(str, authorization_payload["signed_message"])

    # Reconstruct the exact EIP712 message the client should have signed.
    expected_message = MoonstreamAuthorization(
        _name_=AUTH_PAYLOAD_NAME,
        _version_=AUTH_VERSION,
        address=claimed_address,
        deadline=deadline,
    )

    recovered_address = web3_client.eth.account.recover_message(
        expected_message.signable_message, signature=signature
    )
    if recovered_address != claimed_address:
        raise MoonstreamAuthorizationVerificationError("Invalid signer")

    if deadline < now:
        raise MoonstreamAuthorizationExpired("Deadline exceeded")

    return True
|
||||
|
||||
|
||||
def decrypt_keystore(keystore_path: str, password: str) -> Tuple[str, HexBytes]:
    """
    Decrypt an Ethereum keystore file.

    Returns a (address, private_key) pair read from the keystore.
    Propagates ValueError from Account.decrypt on a wrong password.
    """
    with open(keystore_path) as keystore_file:
        keystore_data = json.load(keystore_file)
    # Fix: the original return annotation was `-> HexBytes`, but this function
    # has always returned a (address, private_key) tuple.
    return keystore_data["address"], Account.decrypt(keystore_data, password)
|
||||
|
||||
|
||||
def handle_authorize(args: argparse.Namespace) -> None:
    """Handler for the `authorize` subcommand: print a signed payload as JSON."""
    address, private_key = decrypt_keystore(args.signer, args.password)
    authorization = authorize(args.deadline, address, private_key)
    print(json.dumps(authorization))
|
||||
|
||||
|
||||
def handle_verify(args: argparse.Namespace) -> None:
    """Handler for the `verify` subcommand: decode a base64 payload and verify it."""
    payload_json = base64.decodebytes(args.payload).decode("utf-8")
    payload = json.loads(payload_json)
    # verify raises on failure, so reaching the print means success.
    verify(payload)
    print("Verified!")
|
||||
|
||||
|
||||
def generate_cli() -> argparse.ArgumentParser:
    """
    Build the argparse CLI for the authorization module.

    Subcommands:
    - authorize: produce a signed authorization payload from a keystore.
    - verify: check a base64-encoded authorization payload.
    """
    parser = argparse.ArgumentParser(
        description="Moonstream Engine authorization module"
    )
    subcommands = parser.add_subparsers()

    authorize_parser = subcommands.add_parser("authorize")
    authorize_parser.add_argument(
        "-t",
        "--deadline",
        type=int,
        default=int(time.time()) + DEFAULT_INTERVAL,
        help="Authorization deadline (seconds since epoch timestamp).",
    )
    authorize_parser.add_argument(
        "-s",
        "--signer",
        required=True,
        help="Path to signer keyfile (or brownie account name).",
    )
    authorize_parser.add_argument(
        "-p",
        "--password",
        required=False,
        # Fix: typo "prompte" -> "prompted" in user-facing help text.
        help="(Optional) password for signing account. If you don't provide it here, you will be prompted for it.",
    )
    authorize_parser.set_defaults(func=handle_authorize)

    verify_parser = subcommands.add_parser("verify")
    verify_parser.add_argument(
        "--payload",
        # argparse yields str; base64.decodebytes needs bytes.
        type=lambda s: s.encode(),
        required=True,
        help="Base64-encoded payload to verify",
    )
    verify_parser.set_defaults(func=handle_verify)

    return parser
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
parser = generate_cli()
|
||||
args = parser.parse_args()
|
||||
args.func(args)
|
|
@ -0,0 +1,925 @@
|
|||
import argparse
|
||||
import csv
|
||||
import getpass
|
||||
import json
|
||||
import logging
|
||||
from uuid import UUID
|
||||
|
||||
from engineapi.models import Leaderboard
|
||||
|
||||
from . import actions
|
||||
from . import db
|
||||
from . import signatures
|
||||
from . import data
|
||||
from . import auth
|
||||
from . import contracts_actions
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def signing_server_list_handler(args: argparse.Namespace) -> None:
    """List signing server instances (all, or a single one by id) as JSON."""
    try:
        instances = signatures.list_signing_instances(
            signing_instances=[] if args.instance is None else [args.instance]
        )
    except Exception as err:
        logger.error(f"Unhandled /list exception: {err}")
        return

    print(data.SignerListResponse(instances=instances).json())
|
||||
|
||||
|
||||
def signing_server_wakeup_handler(args: argparse.Namespace) -> None:
    """Start signing server instances and print the started instances as JSON."""
    try:
        run_instances = signatures.wakeup_signing_instances(
            run_confirmed=args.confirmed, dry_run=args.dry_run
        )
    except signatures.AWSRunInstancesFail:
        # Known failure mode of the AWS RunInstances call; abort quietly.
        return
    except Exception as err:
        logger.error(f"Unhandled /wakeup exception: {err}")
        return

    print(data.SignerWakeupResponse(instances=run_instances).json())
|
||||
|
||||
|
||||
def signing_server_sleep_handler(args: argparse.Namespace) -> None:
    """Terminate a signing server instance and print the terminated set as JSON."""
    try:
        terminated_instances = signatures.sleep_signing_instances(
            signing_instances=[args.instance],
            termination_confirmed=args.confirmed,
            dry_run=args.dry_run,
        )
    # Known failure modes raised by the signatures module; abort quietly.
    except signatures.AWSDescribeInstancesFail:
        return
    except signatures.SigningInstancesNotFound:
        return
    except signatures.SigningInstancesTerminationLimitExceeded:
        return
    except signatures.AWSTerminateInstancesFail:
        return
    except Exception as err:
        logger.error(f"Unhandled /sleep exception: {err}")
        return

    print(data.SignerSleepResponse(instances=list(terminated_instances)).json())
|
||||
|
||||
|
||||
def create_dropper_contract_handler(args: argparse.Namespace) -> None:
    """Register a dropper contract in the database and print the created row."""
    try:
        with db.yield_db_session_ctx() as db_session:
            created_contract = actions.create_dropper_contract(
                db_session=db_session,
                blockchain=args.blockchain,
                dropper_contract_address=args.address,
                title=args.title,
                description=args.description,
                image_uri=args.image_uri,
            )
    except Exception as err:
        logger.error(f"Unhandled /create_dropper_contract exception: {err}")
        return
    print(created_contract)
|
||||
|
||||
|
||||
def delete_dropper_contract_handler(args: argparse.Namespace) -> None:
    """Delete a dropper contract by blockchain and address; print the removed row."""
    try:
        with db.yield_db_session_ctx() as db_session:
            removed_contract = actions.delete_dropper_contract(
                db_session=db_session,
                blockchain=args.blockchain,
                dropper_contract_address=args.address,
            )
    except Exception as err:
        logger.error(f"Unhandled /delete_dropper_contract exception: {err}")
        return
    print(removed_contract)
|
||||
|
||||
|
||||
def list_dropper_contracts_handler(args: argparse.Namespace) -> None:
    """Print every registered dropper contract for a blockchain, one JSON per line."""
    try:
        with db.yield_db_session_ctx() as db_session:
            results = actions.list_dropper_contracts(
                db_session=db_session, blockchain=args.blockchain
            )
    except Exception as err:
        logger.error(f"Unhandled /list_dropper_contracts exception: {err}")
        return
    print(
        "\n".join(
            [
                data.DropperContractResponse(
                    id=result.id,
                    blockchain=result.blockchain,
                    address=result.address,
                    title=result.title,
                    description=result.description,
                    image_uri=result.image_uri,
                ).json()
                for result in results
            ]
        )
    )
|
||||
|
||||
|
||||
def dropper_create_drop_handler(args: argparse.Namespace) -> None:
    """Create a claim (drop) on a dropper contract and print the created row."""
    try:
        with db.yield_db_session_ctx() as db_session:
            created_claim = actions.create_claim(
                db_session=db_session,
                dropper_contract_id=args.dropper_contract_id,
                claim_id=args.claim_id,
                title=args.title,
                description=args.description,
                terminus_address=args.terminus_address,
                terminus_pool_id=args.terminus_pool_id,
                claim_block_deadline=args.block_deadline,
            )
    except Exception as err:
        logger.error(f"Unhandled /create_dropper_claim exception: {err}")
        return
    print(created_claim)
|
||||
|
||||
|
||||
def dropper_activate_drop_handler(args: argparse.Namespace) -> None:
    """Activate a dropper claim and print the updated row."""
    try:
        with db.yield_db_session_ctx() as db_session:
            activated_claim = actions.activate_claim(
                db_session=db_session,
                dropper_claim_id=args.dropper_claim_id,
            )
    except Exception as err:
        logger.error(f"Unhandled exception: {err}")
        return
    print(activated_claim)
|
||||
|
||||
|
||||
def dropper_deactivate_drop_handler(args: argparse.Namespace) -> None:
    """Deactivate a dropper claim and print the updated row."""
    try:
        with db.yield_db_session_ctx() as db_session:
            deactivated_claim = actions.deactivate_claim(
                db_session=db_session,
                dropper_claim_id=args.dropper_claim_id,
            )
    except Exception as err:
        logger.error(f"Unhandled exception: {err}")
        return
    print(deactivated_claim)
|
||||
|
||||
|
||||
def dropper_admin_pool_handler(args: argparse.Namespace) -> None:
    """Print the Terminus admin pool (blockchain, address, pool id) for a claim."""
    try:
        with db.yield_db_session_ctx() as db_session:
            (
                blockchain,
                terminus_address,
                terminus_pool_id,
            ) = actions.get_claim_admin_pool(
                db_session=db_session, dropper_claim_id=args.id
            )
    except Exception as err:
        logger.error(f"Unhandled exception: {err}")
        return

    print(
        f"Blockchain: {blockchain}, Terminus address: {terminus_address}, Pool ID: {terminus_pool_id}"
    )
|
||||
|
||||
|
||||
def dropper_list_drops_handler(args: argparse.Namespace) -> None:
    """List claims on a dropper contract, optionally filtered by active flag."""
    try:
        with db.yield_db_session_ctx() as db_session:
            dropper_claims = actions.list_claims(
                db_session=db_session,
                dropper_contract_id=args.dropper_contract_id,
                active=args.active,
            )
    except Exception as err:
        logger.error(f"Unhandled /list_dropper_claims exception: {err}")
        return
    print(dropper_claims)
|
||||
|
||||
|
||||
def dropper_delete_drop_handler(args: argparse.Namespace) -> None:
    """Delete a dropper claim and print the removed row."""
    try:
        with db.yield_db_session_ctx() as db_session:
            removed_claim = actions.delete_claim(
                db_session=db_session,
                dropper_claim_id=args.dropper_claim_id,
            )
    except Exception as err:
        logger.error(f"Unhandled /delete_dropper_claim exception: {err}")
        return
    print(removed_claim)
|
||||
|
||||
|
||||
def add_claimants_handler(args: argparse.Namespace) -> None:
    """
    Load list of claimants from csv file and add them to the database.

    The CSV must have exactly the columns "address" and "amount"; any row of
    a different width aborts the whole run before touching the database.
    """

    # NOTE(review): `claimants` is rebound three times below (raw list ->
    # request model -> action result); consider distinct names.
    claimants = []

    with open(args.claimants_file, "r") as f:
        reader = csv.DictReader(f)

        for row in reader:
            if len(row) != 2:
                logger.error(f"Invalid row: {row}")
                raise Exception("Invalid row")
            claimants.append({"address": row["address"], "amount": row["amount"]})

    # format as DropAddClaimantsRequest

    claimants = data.DropAddClaimantsRequest(
        dropper_claim_id=args.dropper_claim_id, claimants=claimants
    )

    with db.yield_db_session_ctx() as db_session:
        try:
            claimants = actions.add_claimants(
                db_session=db_session,
                dropper_claim_id=claimants.dropper_claim_id,
                claimants=claimants.claimants,
                added_by="cli",
            )
        except Exception as err:
            logger.error(f"Unhandled /add_claimants exception: {err}")
            return
        print(data.ClaimantsResponse(claimants=claimants).json())
|
||||
|
||||
|
||||
def delete_claimants_handler(args: argparse.Namespace) -> None:
    """
    Read a CSV file of addresses and remove each address from the claim.

    The CSV must have a single "address" column; any row of a different
    width aborts the whole run before touching the database.
    """
    # Fix: removed a redundant function-local `import csv` — csv is already
    # imported at module level.
    addresses = []

    with open(args.claimants_file, "r") as f:
        reader = csv.DictReader(f)

        for row in reader:
            if len(row) != 1:
                logger.error(f"Invalid row: {row}")
                raise Exception("Invalid row")
            addresses.append(row["address"])

    # Validate the input as a DropRemoveClaimantsRequest before hitting the DB.
    removing_addresses = data.DropRemoveClaimantsRequest(
        dropper_claim_id=args.dropper_claim_id, addresses=addresses
    )

    with db.yield_db_session_ctx() as db_session:
        try:
            addresses = actions.delete_claimants(
                db_session=db_session,
                dropper_claim_id=removing_addresses.dropper_claim_id,
                addresses=removing_addresses.addresses,
            )
        except Exception as err:
            logger.error(f"Unhandled /delete_claimants exception: {err}")
            return
        print(data.RemoveClaimantsResponse(addresses=addresses).json())
|
||||
|
||||
|
||||
def list_claimants_handler(args: argparse.Namespace) -> None:
    """
    List claimants for a claim
    """

    with db.yield_db_session_ctx() as db_session:
        try:
            claimants = actions.get_claimants(
                db_session=db_session, dropper_claim_id=args.dropper_claim_id
            )
        except Exception as err:
            logger.error(f"Unhandled /list_claimants exception: {err}")
            return
        print(claimants)
|
||||
|
||||
|
||||
def add_scores_handler(args: argparse.Namespace) -> None:
    """
    Load scores from a JSON input file and push them onto a leaderboard.

    The file must contain a JSON list of objects matching the Score schema;
    a parse failure aborts before touching the database.
    """
    with open(args.input_file, "r") as f:
        json_input = json.load(f)

    try:
        new_scores = [data.Score(**score) for score in json_input]
    except Exception as err:
        # Fix: dropped a pointless f-prefix on this constant string (F541).
        logger.error("Can't parse json input in score format")
        logger.error(f"Invalid input: {err}")
        return

    with db.yield_db_session_ctx() as db_session:
        try:
            scores = actions.add_scores(
                db_session=db_session,
                leaderboard_id=args.leaderboard_id,
                scores=new_scores,
                overwrite=args.overwrite,
            )
        except Exception as err:
            logger.error(f"Unhandled /add_scores exception: {err}")
            return
|
||||
|
||||
|
||||
def list_leaderboards_handler(args: argparse.Namespace) -> None:
    """Print a page of leaderboards (paginated via --limit/--offset)."""
    with db.yield_db_session_ctx() as db_session:
        # NOTE(review): `Leaderboards` breaks snake_case for a local variable.
        Leaderboards = actions.list_leaderboards(
            db_session=db_session,
            limit=args.limit,
            offset=args.offset,
        )

    print(Leaderboards)
|
||||
|
||||
|
||||
def create_leaderboard_handler(args: argparse.Namespace) -> None:
    """Create a leaderboard with the given title and description; print the row."""
    with db.yield_db_session_ctx() as db_session:
        # NOTE(review): `Leaderboard` shadows the model imported at module top.
        Leaderboard = actions.create_leaderboard(
            db_session=db_session,
            title=args.title,
            description=args.description,
        )

    print(Leaderboard)
|
||||
|
||||
|
||||
def assign_resource_handler(args: argparse.Namespace) -> None:
    """Attach a Brood resource to a leaderboard for access control."""
    with db.yield_db_session_ctx() as db_session:
        try:
            resource_id = actions.assign_resource(
                db_session=db_session,
                resource_id=args.resource_id,
                leaderboard_id=args.leaderboard_id,
            )
            logger.info(
                f"leaderboard:{args.leaderboard_id} assign resource_id:{resource_id}"
            )
        except Exception as err:
            logger.error(f"Unhandled /assign_resource exception: {err}")
            return
|
||||
|
||||
|
||||
def list_resources_handler(args: argparse.Namespace) -> None:
    """Log the resources attached to all leaderboards."""
    with db.yield_db_session_ctx() as db_session:
        resources = actions.list_leaderboards_resources(db_session=db_session)

        logger.info(resources)
|
||||
|
||||
|
||||
def revoke_resource_handler(args: argparse.Namespace) -> None:
    """Detach the access-control resource from a leaderboard."""
    with db.yield_db_session_ctx() as db_session:
        try:
            resource = actions.revoke_resource(
                db_session=db_session,
                leaderboard_id=args.leaderboard_id,
            )
            logger.info(
                f"leaderboard:{args.leaderboard_id} revoke resource current resource_id:{resource}"
            )
        except Exception as err:
            logger.error(f"Unhandled /revoke_resource exception: {err}")
            return
|
||||
|
||||
|
||||
def add_user_handler(args: argparse.Namespace) -> None:
    """
    Add permission to resource cross bugout api.
    """
    # TODO: not implemented yet.
    pass
|
||||
|
||||
|
||||
def delete_user_handler(args: argparse.Namespace) -> None:
    """
    Delete read access from resource cross bugout api.
    """
    # TODO: not implemented yet.
    pass
|
||||
|
||||
|
||||
def sign_handler(args: argparse.Namespace) -> None:
    """Sign a message with a local keystore account, prompting for its password."""
    # Prompt user to enter the password for their signing account
    password_raw = getpass.getpass(
        prompt=f"Enter password for signing account ({args.signer}): "
    )
    password = password_raw.strip()
    signer = signatures.create_account_signer(args.signer, password)
    signed_message = signer.sign_message(args.message)
    print(signed_message)
|
||||
|
||||
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser(
|
||||
description="engineapi: The command line interface to Moonstream Engine API"
|
||||
)
|
||||
parser.set_defaults(func=lambda _: parser.print_help())
|
||||
subparsers = parser.add_subparsers()
|
||||
|
||||
parser_sign = subparsers.add_parser("sign", description="Manually sign a message")
|
||||
parser_sign.add_argument(
|
||||
"-m", "--message", required=True, type=str, help="Message to sign (hex bytes)"
|
||||
)
|
||||
parser_sign.add_argument(
|
||||
"-s",
|
||||
"--signer",
|
||||
required=True,
|
||||
type=str,
|
||||
help="Path to keystore file for signer",
|
||||
)
|
||||
parser_sign.set_defaults(func=sign_handler)
|
||||
|
||||
# Signing server parser
|
||||
parser_signing_server = subparsers.add_parser(
|
||||
"signing-server", description="Signing server commands"
|
||||
)
|
||||
parser_signing_server.set_defaults(
|
||||
func=lambda _: parser_signing_server.print_help()
|
||||
)
|
||||
subparsers_signing_server = parser_signing_server.add_subparsers(
|
||||
description="Signing server commands"
|
||||
)
|
||||
|
||||
parser_signing_server_list = subparsers_signing_server.add_parser(
|
||||
"list", description="List signing servers"
|
||||
)
|
||||
parser_signing_server_list.add_argument(
|
||||
"-i",
|
||||
"--instance",
|
||||
type=str,
|
||||
help="Instance id to get",
|
||||
)
|
||||
parser_signing_server_list.set_defaults(func=signing_server_list_handler)
|
||||
|
||||
parser_signing_server_wakeup = subparsers_signing_server.add_parser(
|
||||
"wakeup", description="Run signing server"
|
||||
)
|
||||
parser_signing_server_wakeup.add_argument(
|
||||
"-c",
|
||||
"--confirmed",
|
||||
action="store_true",
|
||||
help="Provide confirmation flag to run signing instance",
|
||||
)
|
||||
parser_signing_server_wakeup.add_argument(
|
||||
"-d",
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Dry-run flag simulate instance start, using to check proper permissions",
|
||||
)
|
||||
parser_signing_server_wakeup.set_defaults(func=signing_server_wakeup_handler)
|
||||
|
||||
parser_signing_server_sleep = subparsers_signing_server.add_parser(
|
||||
"sleep", description="Terminate signing server"
|
||||
)
|
||||
parser_signing_server_sleep.add_argument(
|
||||
"-i",
|
||||
"--instance",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Instance id to terminate",
|
||||
)
|
||||
parser_signing_server_sleep.add_argument(
|
||||
"-c",
|
||||
"--confirmed",
|
||||
action="store_true",
|
||||
help="Provide confirmation flag to terminate signing instance",
|
||||
)
|
||||
parser_signing_server_sleep.add_argument(
|
||||
"-d",
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Dry-run flag simulate instance termination, using to check proper permissions",
|
||||
)
|
||||
parser_signing_server_sleep.set_defaults(func=signing_server_sleep_handler)
|
||||
|
||||
# Auth parser
|
||||
auth_parser = auth.generate_cli()
|
||||
subparsers.add_parser("auth", parents=[auth_parser], add_help=False)
|
||||
|
||||
# engine-database parser
|
||||
parser_engine_database = subparsers.add_parser(
|
||||
"engine-db", description="engine-db commands"
|
||||
)
|
||||
parser_engine_database.set_defaults(
|
||||
func=lambda _: parser_engine_database.print_help()
|
||||
)
|
||||
subparsers_engine_database = parser_engine_database.add_subparsers(
|
||||
description="Engine-db commands"
|
||||
)
|
||||
|
||||
parser_leaderboard = subparsers_engine_database.add_parser(
|
||||
"leaderboard", description="Leaderboard db commands"
|
||||
)
|
||||
parser_leaderboard.set_defaults(func=lambda _: parser_leaderboard.print_help())
|
||||
|
||||
subparsers_leaderboard = parser_leaderboard.add_subparsers(
|
||||
description="Leaderboard db commands"
|
||||
)
|
||||
|
||||
parser_leaderboard_create = subparsers_leaderboard.add_parser(
|
||||
"create-leaderboard", description="Create dropper contract"
|
||||
)
|
||||
parser_leaderboard_create.add_argument(
|
||||
"-t",
|
||||
"--title",
|
||||
type=str,
|
||||
required=False,
|
||||
help="Leaderboard title",
|
||||
)
|
||||
parser_leaderboard_create.add_argument(
|
||||
"-d",
|
||||
"--description",
|
||||
type=str,
|
||||
required=False,
|
||||
help="Leaderboard description",
|
||||
)
|
||||
|
||||
parser_leaderboard_create.set_defaults(func=create_leaderboard_handler)
|
||||
|
||||
parser_leaderboards_list = subparsers_leaderboard.add_parser(
|
||||
"list-leaderboards", description="List leaderboards"
|
||||
)
|
||||
parser_leaderboards_list.add_argument(
|
||||
"--limit",
|
||||
type=int,
|
||||
default=10,
|
||||
)
|
||||
parser_leaderboards_list.add_argument("--offset", type=int, default=0)
|
||||
parser_leaderboards_list.set_defaults(func=list_leaderboards_handler)
|
||||
|
||||
parser_leaderboard_score = subparsers_leaderboard.add_parser(
|
||||
"add-scores", description="Add position to leaderboards score"
|
||||
)
|
||||
|
||||
parser_leaderboard_score.add_argument(
|
||||
"--leaderboard-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Contract description",
|
||||
)
|
||||
parser_leaderboard_score.add_argument(
|
||||
"--input-file",
|
||||
type=str,
|
||||
required=True,
|
||||
help="File with scores",
|
||||
)
|
||||
|
||||
parser_leaderboard_score.add_argument("--overwrite", type=bool, default=True)
|
||||
|
||||
parser_leaderboard_score.set_defaults(func=add_scores_handler)
|
||||
|
||||
parser_leaderboard_permissions = subparsers_leaderboard.add_parser(
|
||||
"permissions", description="Manage leaderboard permissions"
|
||||
)
|
||||
|
||||
parser_leaderboard_permissions.set_defaults(
|
||||
func=lambda _: parser_leaderboard_score.print_help()
|
||||
)
|
||||
|
||||
subparsers_leaderboard_permissions = parser_leaderboard_permissions.add_subparsers(
|
||||
description="Manage leaderboard permissions"
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_assign = subparsers_leaderboard_permissions.add_parser(
|
||||
"assign", description="Assign resource to leaderboard"
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_assign.add_argument(
|
||||
"--leaderboard-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Leaderboard id",
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_assign.add_argument(
|
||||
"--resource-id",
|
||||
type=UUID,
|
||||
required=False,
|
||||
help="Resource id",
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_assign.set_defaults(func=assign_resource_handler)
|
||||
|
||||
parser_leaderboard_resource_revoke = subparsers_leaderboard_permissions.add_parser(
|
||||
"revoke", description="Revoke resource from leaderboard"
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_revoke.add_argument(
|
||||
"--leaderboard-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Leaderboard id",
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_revoke.set_defaults(func=revoke_resource_handler)
|
||||
|
||||
parser_leaderboard_resource_list = subparsers_leaderboard_permissions.add_parser(
|
||||
"list", description="List leaderboard resources and ids"
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_list.set_defaults(func=list_resources_handler)
|
||||
|
||||
parser_leaderboard_resource_add_user = (
|
||||
subparsers_leaderboard_permissions.add_parser(
|
||||
"add-user", description="Add to user write access to leaderboard"
|
||||
)
|
||||
)
|
||||
parser_leaderboard_resource_add_user.add_argument(
|
||||
"--leaderboard-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Leaderboard id",
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_add_user.add_argument(
|
||||
"--user-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="User id",
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_add_user.set_defaults(func=add_user_handler)
|
||||
|
||||
parser_leaderboard_resource_remove_user = (
|
||||
subparsers_leaderboard_permissions.add_parser(
|
||||
"remove-user", description="Delete write access to leaderboard from user"
|
||||
)
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_remove_user.add_argument(
|
||||
"--leaderboard-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Leaderboard id",
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_remove_user.add_argument(
|
||||
"--user-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="User id",
|
||||
)
|
||||
|
||||
parser_leaderboard_resource_remove_user.set_defaults(func=delete_user_handler)
|
||||
|
||||
parser_dropper = subparsers_engine_database.add_parser(
|
||||
"dropper", description="Dropper db commands"
|
||||
)
|
||||
parser_dropper.set_defaults(func=lambda _: parser_dropper.print_help())
|
||||
|
||||
subparsers_dropper = parser_dropper.add_subparsers(
|
||||
description="Dropper db commands"
|
||||
)
|
||||
|
||||
parser_dropper_contract_create = subparsers_dropper.add_parser(
|
||||
"create-contract", description="Create dropper contract"
|
||||
)
|
||||
parser_dropper_contract_create.add_argument(
|
||||
"-b",
|
||||
"--blockchain",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Blockchain in wich contract was deployed",
|
||||
)
|
||||
parser_dropper_contract_create.add_argument(
|
||||
"-a",
|
||||
"--address",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Contract address",
|
||||
)
|
||||
parser_dropper_contract_create.add_argument(
|
||||
"-t",
|
||||
"--title",
|
||||
type=str,
|
||||
required=False,
|
||||
help="Contract title",
|
||||
)
|
||||
parser_dropper_contract_create.add_argument(
|
||||
"-d",
|
||||
"--description",
|
||||
type=str,
|
||||
required=False,
|
||||
help="Contract description",
|
||||
)
|
||||
parser_dropper_contract_create.add_argument(
|
||||
"-i",
|
||||
"--image-uri",
|
||||
type=str,
|
||||
required=False,
|
||||
help="Contract image uri",
|
||||
)
|
||||
|
||||
parser_dropper_contract_create.set_defaults(func=create_dropper_contract_handler)
|
||||
|
||||
parser_dropper_contract_list = subparsers_dropper.add_parser(
|
||||
"list-contracts", description="List dropper contracts"
|
||||
)
|
||||
parser_dropper_contract_list.add_argument(
|
||||
"-b",
|
||||
"--blockchain",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Blockchain in wich contract was deployed",
|
||||
)
|
||||
parser_dropper_contract_list.set_defaults(func=list_dropper_contracts_handler)
|
||||
|
||||
parser_dropper_contract_delete = subparsers_dropper.add_parser(
|
||||
"delete-contract", description="Delete dropper contract"
|
||||
)
|
||||
parser_dropper_contract_delete.add_argument(
|
||||
"-b",
|
||||
"--blockchain",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Blockchain in wich contract was deployed",
|
||||
)
|
||||
parser_dropper_contract_delete.add_argument(
|
||||
"-a",
|
||||
"--address",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Contract address",
|
||||
)
|
||||
parser_dropper_contract_delete.set_defaults(func=delete_dropper_contract_handler)
|
||||
|
||||
parser_dropper_create_drop = subparsers_dropper.add_parser(
|
||||
"create-drop", description="Create dropper drop"
|
||||
)
|
||||
parser_dropper_create_drop.add_argument(
|
||||
"-c",
|
||||
"--dropper-contract-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Dropper contract id",
|
||||
)
|
||||
parser_dropper_create_drop.add_argument(
|
||||
"-t",
|
||||
"--title",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Drop title",
|
||||
)
|
||||
parser_dropper_create_drop.add_argument(
|
||||
"-d",
|
||||
"--description",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Drop description",
|
||||
)
|
||||
parser_dropper_create_drop.add_argument(
|
||||
"-b",
|
||||
"--block-deadline",
|
||||
type=int,
|
||||
required=True,
|
||||
help="Block deadline at which signature will be not returned",
|
||||
)
|
||||
parser_dropper_create_drop.add_argument(
|
||||
"-T",
|
||||
"--terminus-address",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Terminus address",
|
||||
)
|
||||
parser_dropper_create_drop.add_argument(
|
||||
"-p",
|
||||
"--terminus-pool-id",
|
||||
type=int,
|
||||
required=True,
|
||||
help="Terminus pool id",
|
||||
)
|
||||
parser_dropper_create_drop.add_argument(
|
||||
"-m",
|
||||
"--claim-id",
|
||||
type=int,
|
||||
help="Claim id",
|
||||
)
|
||||
|
||||
parser_dropper_create_drop.set_defaults(func=dropper_create_drop_handler)
|
||||
|
||||
parser_dropper_activate_drop = subparsers_dropper.add_parser(
|
||||
"activate-drop", description="Activate dropper drop"
|
||||
)
|
||||
parser_dropper_activate_drop.add_argument(
|
||||
"-c",
|
||||
"--dropper-claim-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Dropper claim id",
|
||||
)
|
||||
parser_dropper_activate_drop.set_defaults(func=dropper_activate_drop_handler)
|
||||
|
||||
parser_dropper_deactivate_drop = subparsers_dropper.add_parser(
|
||||
"deactivate-drop", description="Deactivate dropper drop"
|
||||
)
|
||||
parser_dropper_deactivate_drop.add_argument(
|
||||
"-c",
|
||||
"--dropper-claim-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Dropper claim id",
|
||||
)
|
||||
parser_dropper_deactivate_drop.set_defaults(func=dropper_deactivate_drop_handler)
|
||||
|
||||
parser_dropper_get_claim_admin_pool = subparsers_dropper.add_parser(
|
||||
"admin-pool", description="Get admin pool for drop"
|
||||
)
|
||||
parser_dropper_get_claim_admin_pool.add_argument(
|
||||
"-i", "--id", required=True, help="Dropper Claim ID (Database ID)"
|
||||
)
|
||||
parser_dropper_get_claim_admin_pool.set_defaults(func=dropper_admin_pool_handler)
|
||||
|
||||
parser_dropper_list_drops = subparsers_dropper.add_parser(
|
||||
"list-drops", description="List dropper drops"
|
||||
)
|
||||
parser_dropper_list_drops.add_argument(
|
||||
"-a",
|
||||
"--active",
|
||||
type=bool,
|
||||
required=True,
|
||||
help="Claim is active flag",
|
||||
)
|
||||
parser_dropper_list_drops.add_argument(
|
||||
"-c",
|
||||
"--dropper-contract-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Dropper contract id",
|
||||
)
|
||||
parser_dropper_list_drops.set_defaults(func=dropper_list_drops_handler)
|
||||
|
||||
parser_dropper_delete_drop = subparsers_dropper.add_parser(
|
||||
"delete-drop", description="Delete dropper drop"
|
||||
)
|
||||
parser_dropper_delete_drop.add_argument(
|
||||
"-d",
|
||||
"--dropper-claim-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Drop id in database",
|
||||
)
|
||||
parser_dropper_delete_drop.set_defaults(func=dropper_delete_drop_handler)
|
||||
|
||||
parser_dropper_add_claimants = subparsers_dropper.add_parser(
|
||||
"add-claimants", description="Add claimants to drop"
|
||||
)
|
||||
parser_dropper_add_claimants.add_argument(
|
||||
"-c",
|
||||
"--dropper-claim-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Id of particular claim",
|
||||
)
|
||||
parser_dropper_add_claimants.add_argument(
|
||||
"-f",
|
||||
"--claimants-file",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Csv of claimants addresses",
|
||||
)
|
||||
parser_dropper_add_claimants.set_defaults(func=add_claimants_handler)
|
||||
|
||||
parser_dropper_delete_claimants = subparsers_dropper.add_parser(
|
||||
"delete-claimants", description="Delete claimants from drop"
|
||||
)
|
||||
parser_dropper_delete_claimants.add_argument(
|
||||
"-c",
|
||||
"--dropper-claim-id",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Id of particular claim",
|
||||
)
|
||||
parser_dropper_delete_claimants.add_argument(
|
||||
"-f",
|
||||
"--claimants-file",
|
||||
type=str,
|
||||
required=True,
|
||||
help="Csv of claimants addresses",
|
||||
)
|
||||
parser_dropper_delete_claimants.set_defaults(func=delete_claimants_handler)
|
||||
|
||||
parser_dropper_list_claimants = subparsers_dropper.add_parser(
|
||||
"list-claimants", description="List claimants of drop"
|
||||
)
|
||||
parser_dropper_list_claimants.add_argument(
|
||||
"-c", "--dropper-claim-id", type=str, required=True, help="Dropper claim id"
|
||||
)
|
||||
parser_dropper_list_claimants.set_defaults(func=list_claimants_handler)
|
||||
|
||||
contracts_parser = contracts_actions.generate_cli()
|
||||
subparsers_engine_database.add_parser(
|
||||
"contracts", parents=[contracts_parser], add_help=False
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
args.func(args)
|
||||
|
||||
|
||||
# Script entry point: build the CLI and dispatch to the chosen handler.
if __name__ == "__main__":
    main()
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,177 @@
|
|||
# Code generated by moonworm : https://github.com/bugout-dev/moonworm
|
||||
# Moonworm version : 0.2.4
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict, Union
|
||||
|
||||
from eth_typing.evm import Address, ChecksumAddress
|
||||
from web3 import Web3
|
||||
from web3.contract import ContractFunction
|
||||
|
||||
from .web3_util import *
|
||||
|
||||
# Load the Dropper contract ABI shipped next to this module; the JSON file is
# generated alongside these bindings by moonworm.
abi_path = os.path.join(os.path.dirname(__file__), "Dropper_abi.json")
with open(abi_path, "r") as abi_file:
    CONTRACT_ABI = json.load(abi_file)
|
||||
|
||||
|
||||
class Contract:
    """
    Python bindings for the Dropper smart contract.

    Code generated by moonworm -- do not hand-edit logic here; regenerate
    instead. Each method mirrors one on-chain function and returns an
    un-dispatched web3 ``ContractFunction``: the caller decides whether to
    ``.call()`` it (read) or build and send a transaction (write).
    NOTE(review): ``ContractConstructor`` is presumably provided by the star
    import of ``.web3_util`` -- confirm.
    """

    def __init__(self, web3: Web3, contract_address: ChecksumAddress):
        # Bind a web3 client to the deployed contract address using the
        # module-level ABI loaded above.
        self.web3 = web3
        self.address = contract_address
        self.contract = web3.eth.contract(address=self.address, abi=CONTRACT_ABI)

    @staticmethod
    def constructor() -> ContractConstructor:
        # Dropper takes no constructor arguments.
        return ContractConstructor()

    # Token-type discriminator constants exposed by the contract.
    def ERC1155_TYPE(self) -> ContractFunction:
        return self.contract.functions.ERC1155_TYPE()

    def ERC20_TYPE(self) -> ContractFunction:
        return self.contract.functions.ERC20_TYPE()

    def ERC721_TYPE(self) -> ContractFunction:
        return self.contract.functions.ERC721_TYPE()

    def TERMINUS_MINTABLE_TYPE(self) -> ContractFunction:
        return self.contract.functions.TERMINUS_MINTABLE_TYPE()

    def claim(
        self, claimId: int, blockDeadline: int, amount: int, signature: bytes
    ) -> ContractFunction:
        return self.contract.functions.claim(claimId, blockDeadline, amount, signature)

    def claimMessageHash(
        self, claimId: int, claimant: ChecksumAddress, blockDeadline: int, amount: int
    ) -> ContractFunction:
        return self.contract.functions.claimMessageHash(
            claimId, claimant, blockDeadline, amount
        )

    def claimStatus(self, claimId: int) -> ContractFunction:
        return self.contract.functions.claimStatus(claimId)

    def claimUri(self, claimId: int) -> ContractFunction:
        return self.contract.functions.claimUri(claimId)

    def createClaim(
        self, tokenType: int, tokenAddress: ChecksumAddress, tokenId: int, amount: int
    ) -> ContractFunction:
        return self.contract.functions.createClaim(
            tokenType, tokenAddress, tokenId, amount
        )

    def getClaim(self, claimId: int) -> ContractFunction:
        return self.contract.functions.getClaim(claimId)

    def getClaimStatus(
        self, claimId: int, claimant: ChecksumAddress
    ) -> ContractFunction:
        return self.contract.functions.getClaimStatus(claimId, claimant)

    def getSignerForClaim(self, claimId: int) -> ContractFunction:
        return self.contract.functions.getSignerForClaim(claimId)

    def numClaims(self) -> ContractFunction:
        return self.contract.functions.numClaims()

    # ERC-1155 / ERC-721 receiver hooks (safe-transfer callbacks).
    def onERC1155BatchReceived(
        self,
        arg1: ChecksumAddress,
        arg2: ChecksumAddress,
        arg3: List,
        arg4: List,
        arg5: bytes,
    ) -> ContractFunction:
        return self.contract.functions.onERC1155BatchReceived(
            arg1, arg2, arg3, arg4, arg5
        )

    def onERC1155Received(
        self,
        arg1: ChecksumAddress,
        arg2: ChecksumAddress,
        arg3: int,
        arg4: int,
        arg5: bytes,
    ) -> ContractFunction:
        return self.contract.functions.onERC1155Received(arg1, arg2, arg3, arg4, arg5)

    def onERC721Received(
        self,
        operator: ChecksumAddress,
        from_: ChecksumAddress,
        tokenId: int,
        data: bytes,
    ) -> ContractFunction:
        return self.contract.functions.onERC721Received(operator, from_, tokenId, data)

    def owner(self) -> ContractFunction:
        return self.contract.functions.owner()

    def paused(self) -> ContractFunction:
        return self.contract.functions.paused()

    def renounceOwnership(self) -> ContractFunction:
        return self.contract.functions.renounceOwnership()

    def setClaimStatus(self, claimId: int, status: bool) -> ContractFunction:
        return self.contract.functions.setClaimStatus(claimId, status)

    def setClaimUri(self, claimId: int, uri: str) -> ContractFunction:
        return self.contract.functions.setClaimUri(claimId, uri)

    def setSignerForClaim(
        self, claimId: int, signer: ChecksumAddress
    ) -> ContractFunction:
        return self.contract.functions.setSignerForClaim(claimId, signer)

    def supportsInterface(self, interfaceId: bytes) -> ContractFunction:
        return self.contract.functions.supportsInterface(interfaceId)

    def surrenderPoolControl(
        self,
        poolId: int,
        terminusAddress: ChecksumAddress,
        newPoolController: ChecksumAddress,
    ) -> ContractFunction:
        return self.contract.functions.surrenderPoolControl(
            poolId, terminusAddress, newPoolController
        )

    def transferOwnership(self, newOwner: ChecksumAddress) -> ContractFunction:
        return self.contract.functions.transferOwnership(newOwner)

    # Owner withdrawal helpers for tokens held by the contract.
    def withdrawERC1155(
        self, tokenAddress: ChecksumAddress, tokenId: int, amount: int
    ) -> ContractFunction:
        return self.contract.functions.withdrawERC1155(tokenAddress, tokenId, amount)

    def withdrawERC20(
        self, tokenAddress: ChecksumAddress, amount: int
    ) -> ContractFunction:
        return self.contract.functions.withdrawERC20(tokenAddress, amount)

    def withdrawERC721(
        self, tokenAddress: ChecksumAddress, tokenId: int
    ) -> ContractFunction:
        return self.contract.functions.withdrawERC721(tokenAddress, tokenId)
|
||||
|
||||
|
||||
def deploy(
    web3: Web3,
    contract_constructor: ContractFunction,
    contract_bytecode: str,
    deployer_address: ChecksumAddress,
    deployer_private_key: str,
) -> Contract:
    """
    Deploy the contract and return bindings to the deployed instance.

    Signs and sends the deployment transaction with the given deployer key,
    then wraps the resulting address in a ``Contract``. The transaction hash
    is intentionally discarded (it was an unused local before).
    """
    _tx_hash, contract_address = deploy_contract_from_constructor_function(
        web3,
        constructor=contract_constructor,
        contract_bytecode=contract_bytecode,
        contract_abi=CONTRACT_ABI,
        deployer=deployer_address,
        deployer_private_key=deployer_private_key,
    )
    return Contract(web3, contract_address)
|
|
@ -0,0 +1 @@
|
|||
[{"inputs": [{"internalType": "string", "name": "name", "type": "string"}, {"internalType": "string", "name": "symbol", "type": "string"}], "stateMutability": "nonpayable", "type": "constructor"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "owner", "type": "address"}, {"indexed": true, "internalType": "address", "name": "spender", "type": "address"}, {"indexed": false, "internalType": "uint256", "name": "value", "type": "uint256"}], "name": "Approval", "type": "event"}, {"anonymous": false, "inputs": [{"indexed": true, "internalType": "address", "name": "from", "type": "address"}, {"indexed": true, "internalType": "address", "name": "to", "type": "address"}, {"indexed": false, "internalType": "uint256", "name": "value", "type": "uint256"}], "name": "Transfer", "type": "event"}, {"inputs": [{"internalType": "address", "name": "owner", "type": "address"}, {"internalType": "address", "name": "spender", "type": "address"}], "name": "allowance", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "spender", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "approve", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "account", "type": "address"}], "name": "balanceOf", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "decimals", "outputs": [{"internalType": "uint8", "name": "", "type": "uint8"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "spender", "type": "address"}, {"internalType": "uint256", "name": "subtractedValue", "type": "uint256"}], "name": "decreaseAllowance", "outputs": [{"internalType": "bool", 
"name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "spender", "type": "address"}, {"internalType": "uint256", "name": "addedValue", "type": "uint256"}], "name": "increaseAllowance", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "account", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "mint", "outputs": [], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [], "name": "name", "outputs": [{"internalType": "string", "name": "", "type": "string"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "symbol", "outputs": [{"internalType": "string", "name": "", "type": "string"}], "stateMutability": "view", "type": "function"}, {"inputs": [], "name": "totalSupply", "outputs": [{"internalType": "uint256", "name": "", "type": "uint256"}], "stateMutability": "view", "type": "function"}, {"inputs": [{"internalType": "address", "name": "recipient", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "transfer", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}, {"inputs": [{"internalType": "address", "name": "sender", "type": "address"}, {"internalType": "address", "name": "recipient", "type": "address"}, {"internalType": "uint256", "name": "amount", "type": "uint256"}], "name": "transferFrom", "outputs": [{"internalType": "bool", "name": "", "type": "bool"}], "stateMutability": "nonpayable", "type": "function"}]
|
|
@ -0,0 +1,88 @@
|
|||
# Code generated by moonworm : https://github.com/bugout-dev/moonworm
|
||||
# Moonworm version : 0.2.4
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict, Union
|
||||
|
||||
from eth_typing.evm import Address, ChecksumAddress
|
||||
from web3 import Web3
|
||||
from web3.contract import ContractFunction
|
||||
|
||||
from .web3_util import *
|
||||
|
||||
# Load the ERC20 contract ABI shipped next to this module; the JSON file is
# generated alongside these bindings by moonworm.
abi_path = os.path.join(os.path.dirname(__file__), "ERC20_abi.json")
with open(abi_path, "r") as abi_file:
    CONTRACT_ABI = json.load(abi_file)
|
||||
|
||||
|
||||
class Contract:
    """
    Python bindings for an ERC20 token contract.

    Code generated by moonworm -- do not hand-edit logic here; regenerate
    instead. Each method mirrors one on-chain function and returns an
    un-dispatched web3 ``ContractFunction``: the caller decides whether to
    ``.call()`` it (read) or build and send a transaction (write).
    NOTE(review): ``ContractConstructor`` is presumably provided by the star
    import of ``.web3_util`` -- confirm.
    """

    def __init__(self, web3: Web3, contract_address: ChecksumAddress):
        # Bind a web3 client to the deployed contract address using the
        # module-level ABI loaded above.
        self.web3 = web3
        self.address = contract_address
        self.contract = web3.eth.contract(address=self.address, abi=CONTRACT_ABI)

    @staticmethod
    def constructor(name: str, symbol: str) -> ContractConstructor:
        # Matches the (name, symbol) constructor in the ABI.
        return ContractConstructor(name, symbol)

    def allowance(
        self, owner: ChecksumAddress, spender: ChecksumAddress
    ) -> ContractFunction:
        return self.contract.functions.allowance(owner, spender)

    def approve(self, spender: ChecksumAddress, amount: int) -> ContractFunction:
        return self.contract.functions.approve(spender, amount)

    def balanceOf(self, account: ChecksumAddress) -> ContractFunction:
        return self.contract.functions.balanceOf(account)

    def decimals(self) -> ContractFunction:
        return self.contract.functions.decimals()

    def decreaseAllowance(
        self, spender: ChecksumAddress, subtractedValue: int
    ) -> ContractFunction:
        return self.contract.functions.decreaseAllowance(spender, subtractedValue)

    def increaseAllowance(
        self, spender: ChecksumAddress, addedValue: int
    ) -> ContractFunction:
        return self.contract.functions.increaseAllowance(spender, addedValue)

    def mint(self, account: ChecksumAddress, amount: int) -> ContractFunction:
        return self.contract.functions.mint(account, amount)

    def name(self) -> ContractFunction:
        return self.contract.functions.name()

    def symbol(self) -> ContractFunction:
        return self.contract.functions.symbol()

    def totalSupply(self) -> ContractFunction:
        return self.contract.functions.totalSupply()

    def transfer(self, recipient: ChecksumAddress, amount: int) -> ContractFunction:
        return self.contract.functions.transfer(recipient, amount)

    def transferFrom(
        self, sender: ChecksumAddress, recipient: ChecksumAddress, amount: int
    ) -> ContractFunction:
        return self.contract.functions.transferFrom(sender, recipient, amount)
|
||||
|
||||
|
||||
def deploy(
    web3: Web3,
    contract_constructor: ContractFunction,
    contract_bytecode: str,
    deployer_address: ChecksumAddress,
    deployer_private_key: str,
) -> Contract:
    """
    Deploy the contract and return bindings to the deployed instance.

    Signs and sends the deployment transaction with the given deployer key,
    then wraps the resulting address in a ``Contract``. The transaction hash
    is intentionally discarded (it was an unused local before).
    """
    _tx_hash, contract_address = deploy_contract_from_constructor_function(
        web3,
        constructor=contract_constructor,
        contract_bytecode=contract_bytecode,
        contract_abi=CONTRACT_ABI,
        deployer=deployer_address,
        deployer_private_key=deployer_private_key,
    )
    return Contract(web3, contract_address)
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,175 @@
|
|||
# Code generated by moonworm : https://github.com/bugout-dev/moonworm
|
||||
# Moonworm version : 0.2.4
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict, Union
|
||||
|
||||
from eth_typing.evm import Address, ChecksumAddress
|
||||
from web3 import Web3
|
||||
from web3.contract import ContractFunction
|
||||
|
||||
from .web3_util import *
|
||||
|
||||
# Load the Terminus contract ABI shipped next to this module; the JSON file is
# generated alongside these bindings by moonworm.
abi_path = os.path.join(os.path.dirname(__file__), "Terminus_abi.json")
with open(abi_path, "r") as abi_file:
    CONTRACT_ABI = json.load(abi_file)
|
||||
|
||||
|
||||
class Contract:
    """
    Python wrapper around a deployed Terminus smart contract.

    Generated by moonworm — regenerate rather than hand-editing. Each method
    mirrors one contract function and returns an *unsent* web3 ContractFunction;
    the caller decides whether to `.call()` it (read) or build, sign and submit
    a transaction (write).
    """

    def __init__(self, web3: Web3, contract_address: ChecksumAddress):
        # Bind the web3 client and deployed address to the module-level ABI.
        self.web3 = web3
        self.address = contract_address
        self.contract = web3.eth.contract(address=self.address, abi=CONTRACT_ABI)

    @staticmethod
    def constructor() -> ContractConstructor:
        # Terminus takes no constructor arguments.
        return ContractConstructor()

    def approveForPool(
        self, poolID: int, operator: ChecksumAddress
    ) -> ContractFunction:
        return self.contract.functions.approveForPool(poolID, operator)

    def balanceOf(self, account: ChecksumAddress, id: int) -> ContractFunction:
        return self.contract.functions.balanceOf(account, id)

    def balanceOfBatch(self, accounts: List, ids: List) -> ContractFunction:
        return self.contract.functions.balanceOfBatch(accounts, ids)

    def burn(
        self, from_: ChecksumAddress, poolID: int, amount: int
    ) -> ContractFunction:
        return self.contract.functions.burn(from_, poolID, amount)

    def contractURI(self) -> ContractFunction:
        return self.contract.functions.contractURI()

    def createPoolV1(
        self, _capacity: int, _transferable: bool, _burnable: bool
    ) -> ContractFunction:
        return self.contract.functions.createPoolV1(_capacity, _transferable, _burnable)

    def createSimplePool(self, _capacity: int) -> ContractFunction:
        return self.contract.functions.createSimplePool(_capacity)

    def isApprovedForAll(
        self, account: ChecksumAddress, operator: ChecksumAddress
    ) -> ContractFunction:
        return self.contract.functions.isApprovedForAll(account, operator)

    def isApprovedForPool(
        self, poolID: int, operator: ChecksumAddress
    ) -> ContractFunction:
        return self.contract.functions.isApprovedForPool(poolID, operator)

    def mint(
        self, to: ChecksumAddress, poolID: int, amount: int, data: bytes
    ) -> ContractFunction:
        return self.contract.functions.mint(to, poolID, amount, data)

    def mintBatch(
        self, to: ChecksumAddress, poolIDs: List, amounts: List, data: bytes
    ) -> ContractFunction:
        return self.contract.functions.mintBatch(to, poolIDs, amounts, data)

    def paymentToken(self) -> ContractFunction:
        return self.contract.functions.paymentToken()

    def poolBasePrice(self) -> ContractFunction:
        return self.contract.functions.poolBasePrice()

    def poolMintBatch(
        self, id: int, toAddresses: List, amounts: List
    ) -> ContractFunction:
        return self.contract.functions.poolMintBatch(id, toAddresses, amounts)

    def safeBatchTransferFrom(
        self,
        from_: ChecksumAddress,
        to: ChecksumAddress,
        ids: List,
        amounts: List,
        data: bytes,
    ) -> ContractFunction:
        return self.contract.functions.safeBatchTransferFrom(
            from_, to, ids, amounts, data
        )

    def safeTransferFrom(
        self,
        from_: ChecksumAddress,
        to: ChecksumAddress,
        id: int,
        amount: int,
        data: bytes,
    ) -> ContractFunction:
        return self.contract.functions.safeTransferFrom(from_, to, id, amount, data)

    def setApprovalForAll(
        self, operator: ChecksumAddress, approved: bool
    ) -> ContractFunction:
        return self.contract.functions.setApprovalForAll(operator, approved)

    def setContractURI(self, _contractURI: str) -> ContractFunction:
        return self.contract.functions.setContractURI(_contractURI)

    def setController(self, newController: ChecksumAddress) -> ContractFunction:
        return self.contract.functions.setController(newController)

    def setPaymentToken(self, newPaymentToken: ChecksumAddress) -> ContractFunction:
        return self.contract.functions.setPaymentToken(newPaymentToken)

    def setPoolBasePrice(self, newBasePrice: int) -> ContractFunction:
        return self.contract.functions.setPoolBasePrice(newBasePrice)

    def setPoolController(
        self, poolID: int, newController: ChecksumAddress
    ) -> ContractFunction:
        return self.contract.functions.setPoolController(poolID, newController)

    def setURI(self, poolID: int, poolURI: str) -> ContractFunction:
        return self.contract.functions.setURI(poolID, poolURI)

    def supportsInterface(self, interfaceId: bytes) -> ContractFunction:
        return self.contract.functions.supportsInterface(interfaceId)

    def terminusController(self) -> ContractFunction:
        return self.contract.functions.terminusController()

    def terminusPoolCapacity(self, poolID: int) -> ContractFunction:
        return self.contract.functions.terminusPoolCapacity(poolID)

    def terminusPoolController(self, poolID: int) -> ContractFunction:
        return self.contract.functions.terminusPoolController(poolID)

    def terminusPoolSupply(self, poolID: int) -> ContractFunction:
        return self.contract.functions.terminusPoolSupply(poolID)

    def totalPools(self) -> ContractFunction:
        return self.contract.functions.totalPools()

    def uri(self, poolID: int) -> ContractFunction:
        return self.contract.functions.uri(poolID)

    def withdrawPayments(
        self, toAddress: ChecksumAddress, amount: int
    ) -> ContractFunction:
        return self.contract.functions.withdrawPayments(toAddress, amount)
|
||||
|
||||
|
||||
def deploy(
    web3: Web3,
    contract_constructor: ContractFunction,
    contract_bytecode: str,
    deployer_address: ChecksumAddress,
    deployer_private_key: str,
) -> Contract:
    """
    Deploy the Terminus contract and return a Contract wrapper bound to the
    freshly deployed address.

    Blocks until the deployment transaction receipt is available (see
    deploy_contract_from_constructor_function in web3_util).
    """
    tx_hash, contract_address = deploy_contract_from_constructor_function(
        web3,
        constructor=contract_constructor,
        contract_bytecode=contract_bytecode,
        contract_abi=CONTRACT_ABI,
        deployer=deployer_address,
        deployer_private_key=deployer_private_key,
    )
    return Contract(web3, contract_address)
|
|
@ -0,0 +1,201 @@
|
|||
import getpass
|
||||
import os
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
||||
|
||||
import web3
|
||||
from eth_account.account import Account # type: ignore
|
||||
from eth_typing.evm import ChecksumAddress
|
||||
from hexbytes.main import HexBytes
|
||||
from web3 import Web3
|
||||
from web3.contract import Contract, ContractFunction
|
||||
from web3.providers.ipc import IPCProvider
|
||||
from web3.providers.rpc import HTTPProvider
|
||||
from web3.types import ABI, Nonce, TxParams, TxReceipt, Wei
|
||||
|
||||
|
||||
class ContractConstructor:
    """Bundles positional constructor arguments for deferred contract deployment."""

    def __init__(self, *args: Any):
        # Raw positional args, later splatted into contract.constructor(*args).
        self.args = args
|
||||
|
||||
|
||||
def build_transaction(
    web3: Web3,
    builder: Union[ContractFunction, Any],
    sender: ChecksumAddress,
) -> Union[TxParams, Any]:
    """
    Build (but do not submit) a transaction dict for the given contract call.

    Arguments:
    - web3: Web3 client (used to fetch the sender's current nonce)
    - builder: ContractFunction or any object exposing buildTransaction(TxParams)
    - sender: `from` value of the transaction, the address sending it

    Gas/fee fields are left for web3 to estimate; only `from` and `nonce`
    are set explicitly.
    """
    transaction = builder.buildTransaction(
        {
            "from": sender,
            "nonce": get_nonce(web3, sender),
        }
    )
    return transaction
|
||||
|
||||
|
||||
def get_nonce(web3: Web3, address: ChecksumAddress) -> Nonce:
    """Return the transaction count for *address*, i.e. its next usable nonce."""
    return web3.eth.get_transaction_count(address)
|
||||
|
||||
|
||||
def submit_transaction(
    web3: Web3, transaction: Union[TxParams, Any], signer_private_key: str
) -> HexBytes:
    """Sign *transaction* with *signer_private_key* and broadcast it; return the tx hash."""
    signed = web3.eth.account.sign_transaction(
        transaction, private_key=signer_private_key
    )
    return submit_signed_raw_transaction(web3, signed.rawTransaction)
|
||||
|
||||
|
||||
def submit_signed_raw_transaction(
    web3: Web3, signed_raw_transaction: HexBytes
) -> HexBytes:
    """Broadcast an already-signed raw transaction and return its hash."""
    return web3.eth.send_raw_transaction(signed_raw_transaction)
|
||||
|
||||
|
||||
def wait_for_transaction_receipt(web3: Web3, transaction_hash: HexBytes):
    """Block until the node reports a receipt for *transaction_hash*, then return it."""
    return web3.eth.wait_for_transaction_receipt(transaction_hash)
|
||||
|
||||
|
||||
def deploy_contract(
    web3: Web3,
    contract_bytecode: str,
    contract_abi: List[Dict[str, Any]],
    deployer: ChecksumAddress,
    deployer_private_key: str,
    constructor_arguments: Optional[List[Any]] = None,
) -> Tuple[HexBytes, ChecksumAddress]:
    """
    Deploy a smart contract to the blockchain and wait for it to be mined.

    Arguments:
    - web3: web3 client
    - contract_bytecode: compiled smart contract bytecode
    - contract_abi: JSON ABI of the contract (must include `constructor`)
    - deployer: address deploying the contract; pays the transaction fee
    - deployer_private_key: private key of the deployer, used for signing
    - constructor_arguments: positional args for the contract's `constructor`

    Returns (transaction_hash, checksummed_contract_address).
    """
    factory = web3.eth.contract(abi=contract_abi, bytecode=contract_bytecode)
    # constructor(*[]) is identical to constructor(), so a single call path suffices.
    ctor_args = [] if constructor_arguments is None else constructor_arguments
    transaction = build_transaction(web3, factory.constructor(*ctor_args), deployer)

    transaction_hash = submit_transaction(web3, transaction, deployer_private_key)
    receipt = wait_for_transaction_receipt(web3, transaction_hash)
    return transaction_hash, web3.toChecksumAddress(receipt.contractAddress)
|
||||
|
||||
|
||||
def deploy_contract_from_constructor_function(
    web3: Web3,
    contract_bytecode: str,
    contract_abi: List[Dict[str, Any]],
    deployer: ChecksumAddress,
    deployer_private_key: str,
    constructor: ContractConstructor,
) -> Tuple[HexBytes, ChecksumAddress]:
    """
    Deploy a smart contract whose constructor arguments were captured in a
    ContractConstructor, and wait for it to be mined.

    Arguments:
    - web3: web3 client
    - contract_bytecode: compiled smart contract bytecode
    - contract_abi: JSON ABI of the contract (must include `constructor`)
    - deployer: address deploying the contract; pays the transaction fee
    - deployer_private_key: private key of the deployer, used for signing
    - constructor: captured `constructor` arguments for the smart contract

    Returns (transaction_hash, checksummed_contract_address).
    """
    factory = web3.eth.contract(abi=contract_abi, bytecode=contract_bytecode)
    transaction = build_transaction(
        web3, factory.constructor(*constructor.args), deployer
    )

    transaction_hash = submit_transaction(web3, transaction, deployer_private_key)
    receipt = wait_for_transaction_receipt(web3, transaction_hash)
    return transaction_hash, web3.toChecksumAddress(receipt.contractAddress)
|
||||
|
||||
|
||||
def decode_transaction_input(web3: Web3, transaction_input: str, abi: Dict[str, Any]):
    """Decode raw calldata against *abi*; returns web3's (function, params) pair."""
    return web3.eth.contract(abi=abi).decode_function_input(transaction_input)
|
||||
|
||||
|
||||
def read_keys_from_cli() -> Tuple[ChecksumAddress, str]:
    """Prompt for a private key (hidden input) and derive its checksummed address."""
    key = getpass.getpass(prompt="Enter private key of your address:")
    acct = Account.from_key(key)
    return (Web3.toChecksumAddress(acct.address), key)
|
||||
|
||||
|
||||
def read_keys_from_env() -> Tuple[ChecksumAddress, str]:
    """
    Read the signing key from the MOONWORM_ETHEREUM_ADDRESS_PRIVATE_KEY
    environment variable and return (checksummed_address, private_key).

    Raises ValueError when the variable is unset or does not hold a valid
    private key.
    """
    private_key = os.environ.get("MOONWORM_ETHEREUM_ADDRESS_PRIVATE_KEY")
    if private_key is None:
        raise ValueError(
            "MOONWORM_ETHEREUM_ADDRESS_PRIVATE_KEY env variable is not set"
        )
    try:
        account = Account.from_key(private_key)
    # FIX: was a bare `except:` which also swallowed SystemExit/KeyboardInterrupt;
    # catch Exception and chain the cause so the real failure is preserved.
    except Exception as err:
        raise ValueError(
            "Failed to initiate account from MOONWORM_ETHEREUM_ADDRESS_PRIVATE_KEY"
        ) from err
    return (Web3.toChecksumAddress(account.address), private_key)
|
||||
|
||||
|
||||
def connect(web3_uri: str) -> Web3:
    """
    Build a Web3 client for *web3_uri*.

    http(s):// URIs get an HTTPProvider; anything else is treated as an IPC
    socket path. FIX: the original eagerly instantiated a default
    `Web3.IPCProvider()` just to seed the annotated variable, creating a
    throwaway provider on every call; a bare annotation is sufficient.
    """
    web3_provider: Union[IPCProvider, HTTPProvider]
    if web3_uri.startswith(("http://", "https://")):
        web3_provider = Web3.HTTPProvider(web3_uri)
    else:
        web3_provider = Web3.IPCProvider(web3_uri)
    return Web3(web3_provider)
|
||||
|
||||
|
||||
def read_web3_provider_from_env() -> Web3:
    """Build a Web3 client from the MOONWORM_WEB3_PROVIDER_URI environment variable."""
    uri = os.environ.get("MOONWORM_WEB3_PROVIDER_URI")
    if uri is None:
        raise ValueError("MOONWORM_WEB3_PROVIDER_URI env variable is not set")
    return connect(uri)
|
||||
|
||||
|
||||
def read_web3_provider_from_cli() -> Web3:
    """Interactively prompt for a web3 provider URI and connect to it."""
    return connect(input("Enter web3 uri path: "))
|
||||
|
||||
|
||||
def cast_to_python_type(evm_type: str) -> Callable:
    """
    Map a Solidity/EVM type name to the Python callable used to coerce
    values of that type. Raises ValueError for unsupported type names.
    """
    if evm_type.startswith("uint") or evm_type.startswith("int"):
        return int
    if evm_type.startswith("bytes"):
        return bytes
    exact = {"string": str, "bool": bool}
    if evm_type in exact:
        return exact[evm_type]
    if evm_type == "address":
        # Resolved lazily so importing this module never touches Web3.
        return Web3.toChecksumAddress
    raise ValueError(f"Cannot convert to python type {evm_type}")
|
|
@ -0,0 +1,773 @@
|
|||
import argparse
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
from datetime import timedelta
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from sqlalchemy import func, text
|
||||
from sqlalchemy.exc import IntegrityError, NoResultFound
|
||||
from sqlalchemy.orm import Session
|
||||
from web3 import Web3
|
||||
|
||||
from . import data, db
|
||||
from .data import ContractType
|
||||
from .models import CallRequest, RegisteredContract
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CallRequestNotFound(Exception):
    """No call request matched the given parameters in the database."""
|
||||
|
||||
|
||||
class InvalidAddressFormat(Exception):
    """An address failed Web3 checksum validation."""
|
||||
|
||||
|
||||
class ContractAlreadyRegistered(Exception):
    """Raised on a uniqueness violation when registering a duplicate contract."""

    pass
|
||||
|
||||
|
||||
def validate_method_and_params(
    contract_type: ContractType, method: str, parameters: Dict[str, Any]
) -> None:
    """
    Validate the given method and parameters for the specified contract_type.

    Raises ValueError on a wrong method name or parameter set, and
    InvalidAddressFormat when the dropper `signer` parameter is not a valid
    address. Side effect: for dropper calls, `parameters["amount"]` is
    normalized to a string in place.
    """
    if contract_type == ContractType.raw:
        if method != "":
            raise ValueError("Method must be empty string for raw contract type")
        if set(parameters.keys()) != {"calldata"}:
            raise ValueError(
                "Parameters must have only 'calldata' key for raw contract type"
            )
    elif contract_type == ContractType.dropper:
        if method != "claim":
            raise ValueError("Method must be 'claim' for dropper contract type")
        required_params = {
            "dropId",
            "requestID",
            "blockDeadline",
            "amount",
            "signer",
            "signature",
        }
        if set(parameters.keys()) != required_params:
            raise ValueError(
                f"Parameters must have {required_params} keys for dropper contract type"
            )
        try:
            Web3.toChecksumAddress(parameters["signer"])
        # FIX: was a bare `except:`; catch Exception and chain the cause.
        except Exception as err:
            raise InvalidAddressFormat(
                "Parameter signer must be a valid address"
            ) from err
        # BUG FIX: the original assigned into `required_params` (a set), which
        # raised TypeError at runtime. The intent is to stringify the amount
        # so it survives JSON serialization without precision loss.
        parameters["amount"] = str(parameters["amount"])
    else:
        raise ValueError(f"Unknown contract type {contract_type}")
|
||||
|
||||
|
||||
def register_contract(
    db_session: Session,
    blockchain: str,
    address: str,
    contract_type: ContractType,
    moonstream_user_id: uuid.UUID,
    title: Optional[str],
    description: Optional[str],
    image_uri: Optional[str],
) -> data.RegisteredContract:
    """
    Register a contract against the Engine instance.

    The address is checksummed before storage. Raises
    ContractAlreadyRegistered when the row violates the uniqueness
    constraint; any other database error is logged and re-raised.
    """
    try:
        contract = RegisteredContract(
            blockchain=blockchain,
            address=Web3.toChecksumAddress(address),
            contract_type=contract_type.value,
            moonstream_user_id=moonstream_user_id,
            title=title,
            description=description,
            image_uri=image_uri,
        )
        db_session.add(contract)
        db_session.commit()
    except IntegrityError as err:
        db_session.rollback()
        # FIX: chain the IntegrityError so the underlying constraint
        # violation is preserved for debugging instead of being discarded.
        raise ContractAlreadyRegistered() from err
    except Exception as err:
        db_session.rollback()
        logger.error(repr(err))
        raise

    return contract
|
||||
|
||||
|
||||
def update_registered_contract(
    db_session: Session,
    moonstream_user_id: uuid.UUID,
    contract_id: uuid.UUID,
    title: Optional[str] = None,
    description: Optional[str] = None,
    image_uri: Optional[str] = None,
    ignore_nulls: bool = True,
) -> data.RegisteredContract:
    """
    Update the registered contract with the given contract ID provided that the
    user with moonstream_user_id has access to it.

    With ignore_nulls=True (the default), None arguments mean "leave unchanged";
    with ignore_nulls=False a None argument explicitly clears the field.
    Raises sqlalchemy NoResultFound (via Query.one) when no matching row exists.
    """
    query = db_session.query(RegisteredContract).filter(
        RegisteredContract.id == contract_id,
        RegisteredContract.moonstream_user_id == moonstream_user_id,
    )

    # Query.one() raises if the contract does not exist or belongs to another user.
    contract = query.one()

    # Apply a field only when it was provided, or when nulls are meaningful.
    if not (title is None and ignore_nulls):
        contract.title = title
    if not (description is None and ignore_nulls):
        contract.description = description
    if not (image_uri is None and ignore_nulls):
        contract.image_uri = image_uri

    try:
        db_session.add(contract)
        db_session.commit()
    except Exception as err:
        logger.error(
            f"update_registered_contract -- error storing update in database: {repr(err)}"
        )
        db_session.rollback()
        raise

    return contract
|
||||
|
||||
|
||||
def get_registered_contract(
    db_session: Session,
    moonstream_user_id: uuid.UUID,
    contract_id: uuid.UUID,
) -> RegisteredContract:
    """
    Fetch a single registered contract by ID, scoped to the owning user.

    Raises sqlalchemy NoResultFound (via Query.one) when no row matches.
    """
    return (
        db_session.query(RegisteredContract)
        .filter(
            RegisteredContract.moonstream_user_id == moonstream_user_id,
            RegisteredContract.id == contract_id,
        )
        .one()
    )
|
||||
|
||||
|
||||
def lookup_registered_contracts(
    db_session: Session,
    moonstream_user_id: uuid.UUID,
    blockchain: Optional[str] = None,
    address: Optional[str] = None,
    contract_type: Optional[ContractType] = None,
    limit: int = 10,
    offset: Optional[int] = None,
) -> List[RegisteredContract]:
    """
    Look up registered contracts owned by moonstream_user_id, optionally
    filtered by blockchain, address, and contract type, paginated with
    limit/offset. Returns an empty list when nothing matches.
    """
    query = db_session.query(RegisteredContract).filter(
        RegisteredContract.moonstream_user_id == moonstream_user_id
    )

    if blockchain is not None:
        query = query.filter(RegisteredContract.blockchain == blockchain)

    if address is not None:
        # Addresses are stored checksummed, so checksum the filter value too.
        query = query.filter(
            RegisteredContract.address == Web3.toChecksumAddress(address)
        )

    if contract_type is not None:
        query = query.filter(RegisteredContract.contract_type == contract_type.value)

    if offset is not None:
        query = query.offset(offset)

    query = query.limit(limit)

    return query.all()
|
||||
|
||||
|
||||
def delete_registered_contract(
    db_session: Session,
    moonstream_user_id: uuid.UUID,
    registered_contract_id: uuid.UUID,
) -> RegisteredContract:
    """
    Delete a registered contract owned by moonstream_user_id and return the
    deleted row. Errors (including "not found" from Query.one) roll back the
    session, are logged, and re-raised.
    """
    try:
        registered_contract = (
            db_session.query(RegisteredContract)
            .filter(RegisteredContract.moonstream_user_id == moonstream_user_id)
            .filter(RegisteredContract.id == registered_contract_id)
            .one()
        )

        db_session.delete(registered_contract)
        db_session.commit()
    except Exception as err:
        db_session.rollback()
        logger.error(repr(err))
        raise

    return registered_contract
|
||||
|
||||
|
||||
def request_calls(
    db_session: Session,
    moonstream_user_id: uuid.UUID,
    registered_contract_id: Optional[uuid.UUID],
    contract_address: Optional[str],
    call_specs: List[data.CallSpecification],
    ttl_days: Optional[int] = None,
) -> int:
    """
    Batch creates call requests for the given registered contract.

    The contract may be identified by ID, by address, or both; at least one is
    required. All rows are committed in a single transaction. Returns the
    number of specs processed.
    """
    # TODO(zomglings): Do not pass raw ttl_days into SQL query - could be subject to SQL injection
    # For now, in the interest of speed, let us just be super cautious with ttl_days.
    # Check that the ttl_days is indeed an integer
    if registered_contract_id is None and contract_address is None:
        raise ValueError(
            "At least one of registered_contract_id or contract_address is required"
        )

    if ttl_days is not None:
        # NOTE(review): `assert` is stripped under `python -O`; a ValueError
        # would be more robust here — confirm before relying on it.
        assert ttl_days == int(ttl_days), "ttl_days must be an integer"
        if ttl_days <= 0:
            raise ValueError("ttl_days must be positive")

    # Check that the moonstream_user_id matches a RegisteredContract with the given id or address
    query = db_session.query(RegisteredContract).filter(
        RegisteredContract.moonstream_user_id == moonstream_user_id
    )

    if registered_contract_id is not None:
        query = query.filter(RegisteredContract.id == registered_contract_id)

    if contract_address is not None:
        query = query.filter(
            RegisteredContract.address == Web3.toChecksumAddress(contract_address)
        )

    try:
        registered_contract = query.one()
    except NoResultFound:
        raise ValueError("Invalid registered_contract_id or moonstream_user_id")

    # Normalize the caller argument using Web3.toChecksumAddress
    contract_type = ContractType(registered_contract.contract_type)
    for specification in call_specs:
        normalized_caller = Web3.toChecksumAddress(specification.caller)

        # Validate the method and parameters for the contract_type
        try:
            validate_method_and_params(
                contract_type, specification.method, specification.parameters
            )
        except InvalidAddressFormat as err:
            raise InvalidAddressFormat(err)
        except Exception as err:
            # NOTE(review): other validation failures (e.g. ValueError) are
            # logged but NOT re-raised, so an invalid spec still gets inserted
            # below — confirm this best-effort behavior is intended.
            logger.error(
                f"Unhandled error occurred during methods and parameters validation, err: {err}"
            )

        # expires_at is computed server-side (func.now()) so all rows share
        # the database's clock rather than this process's.
        expires_at = None
        if ttl_days is not None:
            expires_at = func.now() + timedelta(days=ttl_days)

        request = CallRequest(
            registered_contract_id=registered_contract.id,
            caller=normalized_caller,
            moonstream_user_id=moonstream_user_id,
            method=specification.method,
            parameters=specification.parameters,
            expires_at=expires_at,
        )

        db_session.add(request)
    # Insert the new rows into the database in a single transaction
    try:
        db_session.commit()
    except Exception as e:
        db_session.rollback()
        raise e

    return len(call_specs)
|
||||
|
||||
|
||||
def get_call_requests(
    db_session: Session,
    request_id: uuid.UUID,
) -> data.CallRequest:
    """
    Get call request by ID, joined with its registered contract so the
    response can carry the contract address.

    Raises CallRequestNotFound when no request has the given ID.
    """
    results = (
        db_session.query(CallRequest, RegisteredContract)
        .join(
            RegisteredContract,
            CallRequest.registered_contract_id == RegisteredContract.id,
        )
        .filter(CallRequest.id == request_id)
        .all()
    )
    if len(results) == 0:
        raise CallRequestNotFound("Call request with given ID not found")
    elif len(results) != 1:
        # BUG FIX: the original f-string referenced an undefined name
        # (moonstream_user_id), so this branch raised NameError instead of
        # the intended Exception.
        raise Exception(
            f"Incorrect number of results found for request_id {request_id}"
        )
    call_request, registered_contract = results[0]
    return data.CallRequest(
        contract_address=registered_contract.address, **call_request.__dict__
    )
|
||||
|
||||
|
||||
def list_call_requests(
    db_session: Session,
    contract_id: Optional[uuid.UUID],
    contract_address: Optional[str],
    caller: Optional[str],
    limit: int = 10,
    offset: Optional[int] = None,
    show_expired: bool = False,
) -> List[data.CallRequest]:
    """
    List call requests for the given caller, filtered by contract ID and/or
    address (at least one required), paginated with limit/offset.
    """
    if caller is None:
        raise ValueError("caller must be specified")

    if contract_id is None and contract_address is None:
        raise ValueError(
            "At least one of contract_id or contract_address must be specified"
        )

    # If show_expired is False, filter out expired requests using current time on database server
    query = (
        db_session.query(CallRequest, RegisteredContract)
        .join(
            RegisteredContract,
            CallRequest.registered_contract_id == RegisteredContract.id,
        )
        .filter(CallRequest.caller == Web3.toChecksumAddress(caller))
    )

    if contract_id is not None:
        query = query.filter(CallRequest.registered_contract_id == contract_id)

    if contract_address is not None:
        query = query.filter(
            RegisteredContract.address == Web3.toChecksumAddress(contract_address)
        )

    if not show_expired:
        # NOTE(review): rows with NULL expires_at are excluded by this
        # comparison — confirm never-expiring requests are meant to be hidden.
        query = query.filter(
            CallRequest.expires_at > func.now(),
        )

    if offset is not None:
        query = query.offset(offset)

    query = query.limit(limit)
    results = query.all()
    return [
        data.CallRequest(
            contract_address=registered_contract.address, **call_request.__dict__
        )
        for call_request, registered_contract in results
    ]
|
||||
|
||||
|
||||
# TODO(zomglings): What should the delete functionality for call requests look like?
|
||||
# - Delete expired requests for a given caller?
|
||||
# - Delete all requests for a given caller?
|
||||
# - Delete all requests for a given contract?
|
||||
# - Delete request by ID?
|
||||
# Should we implement these all using a single delete method, or a different method for each
|
||||
# use case?
|
||||
# Will come back to this once API is live.
|
||||
|
||||
|
||||
def delete_requests(
    db_session: Session,
    moonstream_user_id: uuid.UUID,
    request_ids: List[uuid.UUID] = [],
) -> int:
    """
    Bulk-delete the given call requests owned by moonstream_user_id and return
    the number of rows deleted.

    The mutable default `[]` is safe here because request_ids is only read.
    Any error rolls back the session and is re-raised as a generic Exception.
    """
    try:
        requests_to_delete_query = (
            db_session.query(CallRequest)
            .filter(CallRequest.moonstream_user_id == moonstream_user_id)
            .filter(CallRequest.id.in_(request_ids))
        )
        # synchronize_session=False: bulk DELETE without reconciling
        # in-memory objects; the session is committed immediately after.
        requests_to_delete_num: int = requests_to_delete_query.delete(
            synchronize_session=False
        )
        db_session.commit()
    except Exception as err:
        db_session.rollback()
        logger.error(repr(err))
        raise Exception("Failed to delete call requests")

    return requests_to_delete_num
|
||||
|
||||
|
||||
def handle_register(args: argparse.Namespace) -> None:
    """
    Handles the register command: registers a contract from CLI arguments and
    prints the created record as JSON. Errors are logged and swallowed (CLI
    boundary).
    """
    try:
        with db.yield_db_session_ctx() as db_session:
            contract = register_contract(
                db_session=db_session,
                blockchain=args.blockchain,
                address=args.address,
                contract_type=args.contract_type,
                moonstream_user_id=args.user_id,
                title=args.title,
                description=args.description,
                image_uri=args.image_uri,
            )
    except Exception as err:
        logger.error(err)
        return
    print(contract.json())
|
||||
|
||||
|
||||
def handle_list(args: argparse.Namespace) -> None:
    """
    Handles the list command: looks up registered contracts matching the CLI
    filters and prints them as a JSON array. Errors are logged and swallowed
    (CLI boundary).
    """
    try:
        with db.yield_db_session_ctx() as db_session:
            contracts = lookup_registered_contracts(
                db_session=db_session,
                moonstream_user_id=args.user_id,
                blockchain=args.blockchain,
                address=args.address,
                contract_type=args.contract_type,
                limit=args.limit,
                offset=args.offset,
            )
    except Exception as err:
        logger.error(err)
        return

    print(json.dumps([contract.dict() for contract in contracts]))
|
||||
|
||||
|
||||
def handle_delete(args: argparse.Namespace) -> None:
    """
    Handles the delete command: deletes the registered contract with the given
    ID and prints the deleted record as JSON. Errors are logged and swallowed
    (CLI boundary).
    """
    try:
        with db.yield_db_session_ctx() as db_session:
            deleted_contract = delete_registered_contract(
                db_session=db_session,
                registered_contract_id=args.id,
                moonstream_user_id=args.user_id,
            )
    except Exception as err:
        logger.error(err)
        return

    print(deleted_contract.json())
|
||||
|
||||
|
||||
def handle_request_calls(args: argparse.Namespace) -> None:
    """
    Handles the request-calls command.

    Reads a file of JSON-formatted call specifications from `args.call_specs`,
    validates them, and adds them to the call_requests table in the Engine database.

    :param args: The arguments passed to the CLI command.
    """
    with args.call_specs as ifp:
        try:
            call_specs_raw = json.load(ifp)
        except Exception as e:
            logger.error(f"Failed to load call specs: {e}")
            return

        call_specs = [data.CallSpecification(**spec) for spec in call_specs_raw]

        try:
            with db.yield_db_session_ctx() as db_session:
                request_calls(
                    db_session=db_session,
                    moonstream_user_id=args.moonstream_user_id,
                    registered_contract_id=args.registered_contract_id,
                    # BUG FIX: request_calls requires contract_address; the
                    # original omitted it, so every invocation failed with
                    # TypeError. The CLI identifies the contract by ID only.
                    contract_address=None,
                    call_specs=call_specs,
                    ttl_days=args.ttl_days,
                )
        except Exception as e:
            logger.error(f"Failed to request calls: {e}")
            return
|
||||
|
||||
|
||||
def handle_list_requests(args: argparse.Namespace) -> None:
    """
    Handles the requests command: lists call requests for the given caller and
    registered contract and prints them as a JSON array.

    :param args: The arguments passed to the CLI command.
    """
    try:
        with db.yield_db_session_ctx() as db_session:
            call_requests = list_call_requests(
                db_session=db_session,
                contract_id=args.registered_contract_id,
                # BUG FIX: list_call_requests requires contract_address; the
                # original omitted it, so every invocation failed with
                # TypeError. The CLI identifies the contract by ID only.
                contract_address=None,
                caller=args.caller,
                limit=args.limit,
                offset=args.offset,
                show_expired=args.show_expired,
            )
    except Exception as e:
        logger.error(f"Failed to list call requests: {e}")
        return

    print(json.dumps([request.dict() for request in call_requests]))
|
||||
|
||||
|
||||
def generate_cli() -> argparse.ArgumentParser:
    """
    Generates a CLI which can be used to manage registered contracts on an Engine instance.

    Subcommands:
        register      -- register a new contract for a Moonstream user
        list          -- list registered contracts matching filter criteria
        delete        -- delete a registered contract
        request-calls -- create call requests against a registered contract
        requests      -- list call requests for a registered contract
    """
    parser = argparse.ArgumentParser(description="Manage registered contracts")
    # With no subcommand given, print usage instead of failing.
    parser.set_defaults(func=lambda _: parser.print_help())
    subparsers = parser.add_subparsers()

    # register: create a new registered contract.
    register_usage = "Register a new contract"
    register_parser = subparsers.add_parser(
        "register", help=register_usage, description=register_usage
    )
    register_parser.add_argument(
        "-b",
        "--blockchain",
        type=str,
        required=True,
        help="The blockchain the contract is deployed on",
    )
    register_parser.add_argument(
        "-a",
        "--address",
        type=str,
        required=True,
        help="The address of the contract",
    )
    register_parser.add_argument(
        "-c",
        "--contract-type",
        type=ContractType,
        choices=ContractType,
        required=True,
        help="The type of the contract",
    )
    register_parser.add_argument(
        "-u",
        "--user-id",
        type=uuid.UUID,
        required=True,
        help="The ID of the Moonstream user under whom to register the contract",
    )
    register_parser.add_argument(
        "-t",
        "--title",
        type=str,
        required=False,
        default=None,
        help="The title of the contract",
    )
    register_parser.add_argument(
        "-d",
        "--description",
        type=str,
        required=False,
        default=None,
        help="The description of the contract",
    )
    register_parser.add_argument(
        "-i",
        "--image-uri",
        type=str,
        required=False,
        default=None,
        help="The image URI of the contract",
    )
    register_parser.set_defaults(func=handle_register)

    # list: filter a user's registered contracts.
    list_contracts_usage = "List all contracts matching certain criteria"
    list_contracts_parser = subparsers.add_parser(
        "list", help=list_contracts_usage, description=list_contracts_usage
    )
    list_contracts_parser.add_argument(
        "-b",
        "--blockchain",
        type=str,
        required=False,
        default=None,
        help="The blockchain the contract is deployed on",
    )
    list_contracts_parser.add_argument(
        "-a",
        "--address",
        type=str,
        required=False,
        default=None,
        help="The address of the contract",
    )
    list_contracts_parser.add_argument(
        "-c",
        "--contract-type",
        type=ContractType,
        choices=ContractType,
        required=False,
        default=None,
        help="The type of the contract",
    )
    list_contracts_parser.add_argument(
        "-u",
        "--user-id",
        type=uuid.UUID,
        required=True,
        help="The ID of the Moonstream user whose contracts to list",
    )
    list_contracts_parser.add_argument(
        "-N",
        "--limit",
        type=int,
        required=False,
        default=10,
        help="The number of contracts to return",
    )
    list_contracts_parser.add_argument(
        "-n",
        "--offset",
        type=int,
        required=False,
        default=0,
        help="The offset to start returning contracts from",
    )
    list_contracts_parser.set_defaults(func=handle_list)

    # delete: remove a registered contract owned by a user.
    delete_usage = "Delete a registered contract from an Engine instance"
    delete_parser = subparsers.add_parser(
        "delete", help=delete_usage, description=delete_usage
    )
    delete_parser.add_argument(
        "--id",
        type=uuid.UUID,
        required=True,
        help="The ID of the contract to delete",
    )
    delete_parser.add_argument(
        "-u",
        "--user-id",
        type=uuid.UUID,
        required=True,
        help="The ID of the Moonstream user whose contract to delete",
    )
    delete_parser.set_defaults(func=handle_delete)

    # request-calls: batch-create call requests from a JSON spec file.
    request_calls_usage = "Create call requests for a registered contract"
    request_calls_parser = subparsers.add_parser(
        "request-calls", help=request_calls_usage, description=request_calls_usage
    )
    request_calls_parser.add_argument(
        "-i",
        "--registered-contract-id",
        type=uuid.UUID,
        required=True,
        help="The ID of the registered contract to create call requests for",
    )
    request_calls_parser.add_argument(
        "-u",
        "--moonstream-user-id",
        type=uuid.UUID,
        required=True,
        help="The ID of the Moonstream user who owns the contract",
    )
    # NOTE(review): the dest derived from "--calls" is "calls", so the handler
    # must read args.calls.
    request_calls_parser.add_argument(
        "-c",
        "--calls",
        type=argparse.FileType("r"),
        required=True,
        help="Path to the JSON file with call specifications",
    )
    request_calls_parser.add_argument(
        "-t",
        "--ttl-days",
        type=int,
        required=False,
        default=None,
        help="The number of days until the call requests expire",
    )
    request_calls_parser.set_defaults(func=handle_request_calls)

    # requests: list call requests visible to a given caller.
    list_requests_usage = "List requests for calls on a registered contract"
    list_requests_parser = subparsers.add_parser(
        "requests", help=list_requests_usage, description=list_requests_usage
    )
    list_requests_parser.add_argument(
        "-i",
        "--registered-contract-id",
        type=uuid.UUID,
        required=True,
        help="The ID of the registered contract to list call requests for",
    )
    list_requests_parser.add_argument(
        "-c",
        "--caller",
        type=Web3.toChecksumAddress,
        required=True,
        help="Caller's address",
    )
    list_requests_parser.add_argument(
        "-N",
        "--limit",
        type=int,
        required=False,
        default=10,
        help="The number of call requests to return",
    )
    list_requests_parser.add_argument(
        "-n",
        "--offset",
        type=int,
        required=False,
        default=0,
        help="The offset to start returning contracts from",
    )
    list_requests_parser.add_argument(
        "--show-expired",
        action="store_true",
        help="Set this flag to also show expired requests. Default behavior is to hide these.",
    )
    list_requests_parser.set_defaults(func=handle_list_requests)

    return parser
|
||||
|
||||
|
||||
def main() -> None:
    """Entry point: build the CLI, parse arguments, dispatch to the handler."""
    cli = generate_cli()
    arguments = cli.parse_args()
    arguments.func(arguments)


if __name__ == "__main__":
    main()
|
|
@ -0,0 +1,307 @@
|
|||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from pydantic import BaseModel, Field, root_validator, validator
|
||||
from web3 import Web3
|
||||
|
||||
|
||||
class PingResponse(BaseModel):
    """
    Schema for ping response
    """

    # Service status string.
    status: str
|
||||
|
||||
|
||||
class NowResponse(BaseModel):
    """
    Schema for responses on /now endpoint
    """

    # Current server time as a Unix epoch timestamp (seconds).
    epoch_time: float
|
||||
|
||||
|
||||
class SignerListResponse(BaseModel):
    """Response schema listing signer instances."""

    instances: List[Any] = Field(default_factory=list)
|
||||
|
||||
|
||||
class SignerSleepResponse(BaseModel):
    """Response schema naming the signer instances that were put to sleep."""

    instances: List[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class SignerWakeupResponse(BaseModel):
    """Response schema naming the signer instances that were woken up."""

    instances: List[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class DropperContractResponse(BaseModel):
    """API representation of a Dropper contract record."""

    id: UUID
    address: str
    blockchain: str
    title: Optional[str]
    description: Optional[str]
    image_uri: Optional[str]
|
||||
|
||||
|
||||
class DropperTerminusResponse(BaseModel):
    """A (Terminus contract, pool) pair on a blockchain."""

    terminus_address: str
    terminus_pool_id: int
    blockchain: str
|
||||
|
||||
|
||||
class DropperBlockchainResponse(BaseModel):
    """Response schema carrying a single blockchain name."""

    blockchain: str
|
||||
|
||||
|
||||
class DropRegisterRequest(BaseModel):
    """Request body for registering a new drop (claim) on a Dropper contract."""

    dropper_contract_id: UUID
    title: Optional[str] = None
    description: Optional[str] = None
    claim_block_deadline: Optional[int] = None
    terminus_address: Optional[str] = None
    terminus_pool_id: Optional[int] = None
    claim_id: Optional[int] = None
|
||||
|
||||
|
||||
class DropCreatedResponse(BaseModel):
    """Response returned after a drop (claim) has been created."""

    dropper_claim_id: UUID
    dropper_contract_id: UUID
    title: str
    description: str
    claim_block_deadline: Optional[int] = None
    terminus_address: Optional[str] = None
    terminus_pool_id: Optional[int] = None
    claim_id: Optional[int] = None
|
||||
|
||||
|
||||
class Claimant(BaseModel):
    """A claimant address and the amount it may claim."""

    address: str
    amount: int
    # String representation of the amount; presumably for values exceeding
    # native int handling downstream — TODO confirm.
    raw_amount: Optional[str] = None
|
||||
|
||||
|
||||
class BatchAddClaimantsRequest(BaseModel):
    """Request body for adding multiple claimants in one call."""

    claimants: List[Claimant] = Field(default_factory=list)
|
||||
|
||||
|
||||
class BatchRemoveClaimantsRequest(BaseModel):
    """Request body for removing multiple claimants, identified by address."""

    claimants: List[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class DropAddClaimantsRequest(BaseModel):
    """Request body for adding claimants to a specific drop (claim)."""

    dropper_claim_id: UUID
    claimants: List[Claimant] = Field(default_factory=list)
|
||||
|
||||
|
||||
class ClaimantsResponse(BaseModel):
    """Response schema listing claimants."""

    claimants: List[Claimant] = Field(default_factory=list)
|
||||
|
||||
|
||||
class DropRemoveClaimantsRequest(BaseModel):
    """Request body for removing claimant addresses from a specific drop."""

    dropper_claim_id: UUID
    addresses: List[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class RemoveClaimantsResponse(BaseModel):
    """Response schema listing the addresses that were removed."""

    addresses: List[str] = Field(default_factory=list)
|
||||
|
||||
|
||||
class DropperClaimResponse(BaseModel):
    """API representation of a drop (claim) configured on a Dropper contract."""

    id: UUID
    dropper_contract_id: UUID
    title: str
    description: str
    active: bool
    claim_block_deadline: Optional[int] = None
    terminus_address: Optional[str] = None
    terminus_pool_id: Optional[int] = None
    claim_id: Optional[int] = None
|
||||
|
||||
|
||||
class DropResponse(BaseModel):
    """A signed drop entitlement for a single claimant."""

    claimant: str
    claim_id: int
    # Amount serialized as a string.
    amount: str
    block_deadline: int
    signature: str
    title: str
    description: str
|
||||
|
||||
|
||||
class DropBatchResponseItem(BaseModel):
    """One entry in a batch drop response; carries both int and string amounts."""

    claimant: str
    claim_id: int
    title: str
    description: str
    amount: int
    amount_string: str
    block_deadline: int
    signature: str
    dropper_claim_id: UUID
    dropper_contract_address: str
    blockchain: str
|
||||
|
||||
|
||||
class DropListResponse(BaseModel):
    """Response schema listing drops."""

    drops: List[Any] = Field(default_factory=list)
|
||||
|
||||
|
||||
class DropClaimant(BaseModel):
    """Claimant details for a drop; all fields optional."""

    amount: Optional[int]
    added_by: Optional[str]
    address: Optional[str]
|
||||
|
||||
|
||||
class DropActivateRequest(BaseModel):
    """Request body identifying the drop (claim) to activate."""

    dropper_claim_id: UUID
|
||||
|
||||
|
||||
class DropUpdateRequest(BaseModel):
    """Request body for partially updating a drop; unset fields are left unchanged."""

    title: Optional[str] = None
    description: Optional[str] = None
    claim_block_deadline: Optional[int] = None
    terminus_address: Optional[str] = None
    terminus_pool_id: Optional[int] = None
    claim_id: Optional[int] = None
|
||||
|
||||
|
||||
class DropUpdatedResponse(BaseModel):
    """Response returned after a drop (claim) has been updated."""

    dropper_claim_id: UUID
    dropper_contract_id: UUID
    title: str
    description: str
    claim_block_deadline: Optional[int] = None
    terminus_address: Optional[str] = None
    terminus_pool_id: Optional[int] = None
    claim_id: Optional[int] = None
    active: bool = True
|
||||
|
||||
|
||||
class ContractType(Enum):
    """Kinds of contracts that can be registered with Engine."""

    raw = "raw"
    dropper = "dropper-v0.2.0"
|
||||
|
||||
|
||||
class RegisterContractRequest(BaseModel):
    """Request body for registering a contract with Engine."""

    blockchain: str
    address: str
    contract_type: ContractType
    title: Optional[str] = None
    description: Optional[str] = None
    image_uri: Optional[str] = None
|
||||
|
||||
|
||||
class UpdateContractRequest(BaseModel):
    """Request body for updating a registered contract's metadata."""

    title: Optional[str] = None
    description: Optional[str] = None
    image_uri: Optional[str] = None
    # When True, fields left as None are skipped rather than written as NULL.
    ignore_nulls: bool = True
|
||||
|
||||
|
||||
class RegisteredContract(BaseModel):
    """
    API representation of a registered contract row.

    NOTE(review): the validators below coerce UUID and datetime values to
    strings for serialization, even though the fields are annotated as
    UUID/datetime.
    """

    id: UUID
    blockchain: str
    address: str
    contract_type: str
    moonstream_user_id: UUID
    title: Optional[str] = None
    description: Optional[str] = None
    image_uri: Optional[str] = None
    created_at: datetime
    updated_at: datetime

    @validator("id", "moonstream_user_id")
    def validate_uuids(cls, v):
        # Serialize UUIDs as plain strings.
        return str(v)

    @validator("created_at", "updated_at")
    def validate_datetimes(cls, v):
        # Serialize datetimes as ISO 8601 strings.
        return v.isoformat()

    class Config:
        # Allow construction directly from SQLAlchemy ORM objects.
        orm_mode = True
|
||||
|
||||
|
||||
class CallSpecification(BaseModel):
    """A single requested contract call: who calls, which method, with what parameters."""

    caller: str
    method: str
    parameters: Dict[str, Any]

    @validator("caller")
    def validate_web3_addresses(cls, v):
        # Normalize the caller address to EIP-55 checksum form.
        return Web3.toChecksumAddress(v)
|
||||
|
||||
|
||||
class CreateCallRequestsAPIRequest(BaseModel):
    """
    Request body for creating call requests.

    The target contract may be identified by either its registration ID or its
    on-chain address; at least one must be supplied.
    """

    contract_id: Optional[UUID] = None
    contract_address: Optional[str] = None
    specifications: List[CallSpecification] = Field(default_factory=list)
    ttl_days: Optional[int] = None

    # Solution found thanks to https://github.com/pydantic/pydantic/issues/506
    @root_validator
    def at_least_one_of_contract_id_and_contract_address(cls, values):
        if values.get("contract_id") is None and values.get("contract_address") is None:
            raise ValueError(
                "At least one of contract_id and contract_address must be provided"
            )
        return values
|
||||
|
||||
|
||||
class CallRequest(BaseModel):
    """
    API representation of a row in the call_requests table.

    UUID and datetime fields are coerced to strings by the validators below
    for serialization; addresses are normalized to EIP-55 checksum form.
    """

    id: UUID
    contract_id: UUID = Field(alias="registered_contract_id")
    contract_address: Optional[str] = None
    moonstream_user_id: UUID
    caller: str
    method: str
    parameters: Dict[str, Any]
    expires_at: Optional[datetime]
    created_at: datetime
    updated_at: datetime

    class Config:
        # Allow construction directly from SQLAlchemy ORM objects.
        orm_mode = True

    @validator("id", "contract_id", "moonstream_user_id")
    def validate_uuids(cls, v):
        # Serialize UUIDs as plain strings.
        return str(v)

    @validator("created_at", "updated_at", "expires_at")
    def validate_datetimes(cls, v):
        # expires_at is optional: pass None through untouched.
        if v is not None:
            return v.isoformat()
        return v

    @validator("contract_address", "caller")
    def validate_web3_addresses(cls, v):
        # contract_address is optional: checksumming None would raise a
        # TypeError, so leave unset addresses untouched. (The original
        # validator crashed whenever contract_address was None.)
        if v is None:
            return v
        return Web3.toChecksumAddress(v)
|
||||
|
||||
|
||||
class QuartilesResponse(BaseModel):
    """Quartile boundary entries (25th/50th/75th percentile) for a leaderboard."""

    percentile_25: Dict[str, Any]
    percentile_50: Dict[str, Any]
    percentile_75: Dict[str, Any]
|
||||
|
||||
|
||||
class CountAddressesResponse(BaseModel):
    """Response schema carrying an address count."""

    count: int = Field(default_factory=int)
|
||||
|
||||
|
||||
class Score(BaseModel):
    """An address's score together with arbitrary per-score metadata."""

    address: str
    score: int
    points_data: Dict[str, Any]
|
||||
|
||||
|
||||
class LeaderboardPosition(BaseModel):
    """An address's ranked position on a leaderboard."""

    address: str
    rank: int
    score: int
    points_data: Dict[str, Any]
|
||||
|
||||
|
||||
class RanksResponse(BaseModel):
    """A rank bucket: the rank value, its score, and how many entries share it."""

    rank: int
    score: int
    size: int
|
|
@ -0,0 +1,84 @@
|
|||
"""
|
||||
Engine database connection.
|
||||
"""
|
||||
from contextlib import contextmanager
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import sessionmaker, Session
|
||||
|
||||
from .settings import (
|
||||
ENGINE_DB_URI,
|
||||
ENGINE_DB_URI_READ_ONLY,
|
||||
ENGINE_POOL_SIZE,
|
||||
ENGINE_DB_STATEMENT_TIMEOUT_MILLIS,
|
||||
ENGINE_DB_POOL_RECYCLE_SECONDS,
|
||||
)
|
||||
|
||||
|
||||
def create_local_engine(
    url: Optional[str],
    pool_size: int,
    statement_timeout: int,
    pool_recycle: int,
):
    """
    Create a SQLAlchemy engine for the Engine database.

    :param url: Database connection URI.
    :param pool_size: Size of the connection pool.
    :param statement_timeout: Per-statement timeout in milliseconds, enforced
        server-side via the Postgres "statement_timeout" option.
    :param pool_recycle: Seconds after which pooled connections are recycled.
    """
    # Pooling: https://docs.sqlalchemy.org/en/14/core/pooling.html#sqlalchemy.pool.QueuePool
    # Statement timeout: https://stackoverflow.com/a/44936982
    return create_engine(
        url=url,
        pool_size=pool_size,
        pool_recycle=pool_recycle,
        connect_args={"options": f"-c statement_timeout={statement_timeout}"},
    )
|
||||
|
||||
|
||||
# Read/write engine and session factory, configured from environment settings.
engine = create_local_engine(
    url=ENGINE_DB_URI,
    pool_size=ENGINE_POOL_SIZE,
    statement_timeout=ENGINE_DB_STATEMENT_TIMEOUT_MILLIS,
    pool_recycle=ENGINE_DB_POOL_RECYCLE_SECONDS,
)

SessionLocal = sessionmaker(bind=engine)
|
||||
|
||||
|
||||
def yield_db_session() -> Session:
    """
    Yields a database connection (created using environment variables).
    As per FastAPI docs:
    https://fastapi.tiangolo.com/tutorial/sql-databases/#create-a-dependency
    """
    session = SessionLocal()
    try:
        yield session
    finally:
        # Always return the connection to the pool, even if the caller raised.
        session.close()


# Context-manager form of the same dependency, for use outside FastAPI
# (e.g. CLI commands).
yield_db_session_ctx = contextmanager(yield_db_session)
|
||||
|
||||
# Read only connection: separate engine/session factory bound to the
# read-only database URI.
RO_engine = create_local_engine(
    url=ENGINE_DB_URI_READ_ONLY,
    pool_size=ENGINE_POOL_SIZE,
    statement_timeout=ENGINE_DB_STATEMENT_TIMEOUT_MILLIS,
    pool_recycle=ENGINE_DB_POOL_RECYCLE_SECONDS,
)

RO_SessionLocal = sessionmaker(bind=RO_engine)
|
||||
|
||||
|
||||
def yield_db_read_only_session() -> Session:
    """
    Yields read only database connection (created using environment variables).
    As per FastAPI docs:
    https://fastapi.tiangolo.com/tutorial/sql-databases/#create-a-dependency
    """
    session = RO_SessionLocal()
    try:
        yield session
    finally:
        # Always return the connection to the pool, even if the caller raised.
        session.close()


# Context-manager form of the read-only dependency, for use outside FastAPI.
yield_db_read_only_session_ctx = contextmanager(yield_db_read_only_session)
|
|
@ -0,0 +1,201 @@
|
|||
import base64
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Awaitable, Callable, Dict, Optional
|
||||
|
||||
from bugout.data import BugoutUser
|
||||
from bugout.exceptions import BugoutResponseException
|
||||
from fastapi import HTTPException, Request, Response
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
from web3 import Web3
|
||||
|
||||
from .auth import (
|
||||
MoonstreamAuthorizationExpired,
|
||||
MoonstreamAuthorizationVerificationError,
|
||||
verify,
|
||||
)
|
||||
from .settings import bugout_client as bc, MOONSTREAM_APPLICATION_ID
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class BroodAuthMiddleware(BaseHTTPMiddleware):
    """
    Checks the authorization header on the request. If it represents a verified Brood user,
    create another request and get groups user belongs to, after this
    adds a brood_user attribute to the request.state. Otherwise raises a 403 error.

    Taken almost verbatim from the Moonstream repo:
    https://github.com/bugout-dev/moonstream/blob/99504a431acdd903259d1c4014a2808ce5a104c1/backend/moonstreamapi/middleware.py
    """

    def __init__(self, app, whitelist: Optional[Dict[str, str]] = None):
        # whitelist maps URL path -> HTTP method allowed through without auth.
        self.whitelist: Dict[str, str] = {}
        if whitelist is not None:
            self.whitelist = whitelist
        super().__init__(app)

    async def dispatch(
        self, request: Request, call_next: Callable[[Request], Awaitable[Response]]
    ):
        # Filter out endpoints with proper method to work without Bearer token (as create_user, login, etc)
        path = request.url.path.rstrip("/")
        method = request.method
        if path in self.whitelist.keys() and self.whitelist[path] == method:
            return await call_next(request)

        authorization_header = request.headers.get("authorization")
        if authorization_header is None:
            return Response(
                status_code=403, content="No authorization header passed with request"
            )
        # Expected form "<scheme> <token>": exactly two components, token last.
        user_token_list = authorization_header.split()
        if len(user_token_list) != 2:
            return Response(status_code=403, content="Wrong authorization header")
        user_token: str = user_token_list[-1]

        try:
            user: BugoutUser = bc.get_user(user_token)
            if not user.verified:
                logger.info(
                    f"Attempted journal access by unverified Brood account: {user.id}"
                )
                return Response(
                    status_code=403,
                    content="Only verified accounts can access journals",
                )
            # The token must belong to this application specifically.
            if str(user.application_id) != str(MOONSTREAM_APPLICATION_ID):
                return Response(
                    status_code=403, content="User does not belong to this application"
                )
        except BugoutResponseException as e:
            # Propagate Brood's own status code and message.
            return Response(status_code=e.status_code, content=e.detail)
        except Exception as e:
            logger.error(f"Error processing Brood response: {str(e)}")
            return Response(status_code=500, content="Internal server error")

        # Expose the authenticated user and raw token to downstream handlers.
        request.state.user = user
        request.state.token = user_token
        return await call_next(request)
|
||||
|
||||
|
||||
class EngineAuthMiddleware(BaseHTTPMiddleware):
    """
    Checks the authorization header on the request. If it represents
    a correctly signed message, adds address and deadline attributes to the request.state.
    Otherwise raises a 403 error.
    """

    def __init__(self, app, whitelist: Optional[Dict[str, str]] = None):
        # whitelist maps URL path -> HTTP method allowed through without auth.
        self.whitelist: Dict[str, str] = {}
        if whitelist is not None:
            self.whitelist = whitelist
        super().__init__(app)

    async def dispatch(
        self, request: Request, call_next: Callable[[Request], Awaitable[Response]]
    ):
        # Filter out whitelisted endpoints without web3 authorization
        path = request.url.path.rstrip("/")
        method = request.method

        if path in self.whitelist.keys() and self.whitelist[path] == method:
            return await call_next(request)

        raw_authorization_header = request.headers.get("authorization")

        if raw_authorization_header is None:
            return Response(
                status_code=403, content="No authorization header passed with request"
            )

        # Expected form: "moonstream <base64_encoded_json_payload>".
        authorization_header_components = raw_authorization_header.split()
        if (
            len(authorization_header_components) != 2
            or authorization_header_components[0].lower() != "moonstream"
        ):
            return Response(
                status_code=403,
                content="Incorrect format for authorization header. Expected 'Authorization: moonstream <base64_encoded_json_payload>'",
            )

        try:
            # Decode the base64 JSON payload carrying the signed message.
            json_payload_str = base64.b64decode(
                authorization_header_components[-1]
            ).decode("utf-8")

            json_payload = json.loads(json_payload_str)
            verified = verify(json_payload)
            address = json_payload.get("address")
            if address is not None:
                # Normalize to EIP-55 checksum form before exposing it.
                address = Web3.toChecksumAddress(address)
            else:
                raise Exception("Address in payload is None")
        except MoonstreamAuthorizationVerificationError as e:
            logger.info("Moonstream authorization verification error: %s", e)
            return Response(status_code=403, content="Invalid authorization header")
        except MoonstreamAuthorizationExpired as e:
            logger.info("Moonstream authorization expired: %s", e)
            return Response(status_code=403, content="Authorization expired")
        except Exception as e:
            logger.error("Unexpected exception: %s", e)
            return Response(status_code=500, content="Internal server error")

        # Expose the verified signer to downstream handlers.
        request.state.address = address
        request.state.verified = verified

        return await call_next(request)
|
||||
|
||||
|
||||
class EngineHTTPException(HTTPException):
    """
    Extended HTTPException to handle 500 Internal server errors
    and send crash reports.

    :param status_code: HTTP status code to return to the client.
    :param detail: Response detail passed through to HTTPException.
    :param headers: Optional response headers passed through to HTTPException.
    :param internal_error: The underlying exception that caused this error,
        if any; it is logged but never exposed to the client.
    """

    def __init__(
        self,
        status_code: int,
        detail: Any = None,
        headers: Optional[Dict[str, Any]] = None,
        # Annotation fixed: the default is None, so the type must be Optional.
        internal_error: Optional[Exception] = None,
    ):
        super().__init__(status_code, detail, headers)
        if internal_error is not None:
            # Log through the module logger (instead of bare print) so the
            # error reaches the configured logging pipeline.
            logger.error(internal_error)
            # reporter.error_report(internal_error)
|
||||
|
||||
|
||||
class ExtractBearerTokenMiddleware(BaseHTTPMiddleware):
    """
    Pulls the bearer token out of the Authorization header and stashes it on
    request.state.token for downstream handlers. Whitelisted (path, method)
    pairs pass through without any header.
    """

    def __init__(self, app, whitelist: Optional[Dict[str, str]] = None):
        # Mapping of URL path -> HTTP method allowed through without a token.
        self.whitelist: Dict[str, str] = whitelist if whitelist is not None else {}
        super().__init__(app)

    async def dispatch(
        self, request: Request, call_next: Callable[[Request], Awaitable[Response]]
    ):
        # Whitelisted endpoints (e.g. create_user, login) skip token extraction.
        normalized_path = request.url.path.rstrip("/")
        if self.whitelist.get(normalized_path) == request.method:
            return await call_next(request)

        header_value = request.headers.get("authorization")
        if header_value is None:
            return Response(
                status_code=403, content="No authorization header passed with request"
            )

        # Expect exactly "<scheme> <token>"; the token is the last component.
        components = header_value.split()
        if len(components) != 2:
            return Response(status_code=403, content="Wrong authorization header")

        token: str = components[-1]
        request.state.token = token

        return await call_next(request)
|
|
@ -0,0 +1,287 @@
|
|||
import uuid
|
||||
|
||||
from sqlalchemy import (
|
||||
VARCHAR,
|
||||
BigInteger,
|
||||
Boolean,
|
||||
Column,
|
||||
DateTime,
|
||||
ForeignKey,
|
||||
Index,
|
||||
MetaData,
|
||||
String,
|
||||
UniqueConstraint,
|
||||
)
|
||||
from sqlalchemy.dialects.postgresql import JSONB, UUID
|
||||
from sqlalchemy.ext.compiler import compiles
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.sql import and_, expression
|
||||
|
||||
"""
|
||||
Naming conventions doc
|
||||
https://docs.sqlalchemy.org/en/13/core/constraints.html#configuring-constraint-naming-conventions
|
||||
"""
|
||||
convention = {
|
||||
"ix": "ix_%(column_0_label)s",
|
||||
"uq": "uq_%(table_name)s_%(column_0_name)s",
|
||||
"ck": "ck_%(table_name)s_%(constraint_name)s",
|
||||
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
|
||||
"pk": "pk_%(table_name)s",
|
||||
}
|
||||
metadata = MetaData(naming_convention=convention)
|
||||
Base = declarative_base(metadata=metadata)
|
||||
|
||||
"""
|
||||
Creating a utcnow function which runs on the Posgres database server when created_at and updated_at
|
||||
fields are populated.
|
||||
Following:
|
||||
1. https://docs.sqlalchemy.org/en/13/core/compiler.html#utc-timestamp-function
|
||||
2. https://www.postgresql.org/docs/current/functions-datetime.html#FUNCTIONS-DATETIME-CURRENT
|
||||
3. https://stackoverflow.com/a/33532154/13659585
|
||||
"""
|
||||
|
||||
|
||||
class utcnow(expression.FunctionElement):
|
||||
type = DateTime
|
||||
|
||||
|
||||
@compiles(utcnow, "postgresql")
|
||||
def pg_utcnow(element, compiler, **kwargs):
|
||||
return "TIMEZONE('utc', statement_timestamp())"
|
||||
|
||||
|
||||
class DropperContract(Base):  # type: ignore
    """A Dropper contract deployment known to Engine."""

    __tablename__ = "dropper_contracts"
    # A contract address may only be registered once per blockchain.
    __table_args__ = (UniqueConstraint("blockchain", "address"),)

    id = Column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
        unique=True,
        nullable=False,
    )
    blockchain = Column(VARCHAR(128), nullable=False)
    address = Column(VARCHAR(256), index=True)
    title = Column(VARCHAR(128), nullable=True)
    description = Column(String, nullable=True)
    image_uri = Column(String, nullable=True)

    # Server-side UTC timestamps; see the utcnow compiled function above.
    created_at = Column(
        DateTime(timezone=True), server_default=utcnow(), nullable=False
    )
    updated_at = Column(
        DateTime(timezone=True),
        server_default=utcnow(),
        onupdate=utcnow(),
        nullable=False,
    )
|
||||
|
||||
|
||||
class DropperClaim(Base):  # type: ignore
    """A claim (drop) configured on a Dropper contract."""

    __tablename__ = "dropper_claims"

    id = Column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
        unique=True,
        nullable=False,
    )
    dropper_contract_id = Column(
        UUID(as_uuid=True),
        ForeignKey("dropper_contracts.id", ondelete="CASCADE"),
        nullable=False,
    )
    claim_id = Column(BigInteger, nullable=True)
    title = Column(VARCHAR(128), nullable=True)
    description = Column(String, nullable=True)
    terminus_address = Column(VARCHAR(256), nullable=True, index=True)
    terminus_pool_id = Column(BigInteger, nullable=True, index=True)
    claim_block_deadline = Column(BigInteger, nullable=True)
    active = Column(Boolean, default=False, nullable=False)

    # Server-side UTC timestamps; see the utcnow compiled function above.
    created_at = Column(
        DateTime(timezone=True), server_default=utcnow(), nullable=False
    )
    updated_at = Column(
        DateTime(timezone=True),
        server_default=utcnow(),
        onupdate=utcnow(),
        nullable=False,
    )

    # Partial unique index: a claim_id may back at most one ACTIVE claim per
    # contract; inactive rows and rows without a claim_id are exempt.
    __table_args__ = (
        Index(
            "uq_dropper_claims_dropper_contract_id_claim_id",
            "dropper_contract_id",
            "claim_id",
            unique=True,
            postgresql_where=and_(claim_id.isnot(None), active.is_(True)),
        ),
    )
|
||||
|
||||
|
||||
class DropperClaimant(Base):  # type: ignore
    """An address entitled to a claim, with its amount and signing metadata."""

    __tablename__ = "dropper_claimants"
    # One row per (claim, claimant address) pair.
    __table_args__ = (UniqueConstraint("dropper_claim_id", "address"),)

    id = Column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
        unique=True,
        nullable=False,
    )
    dropper_claim_id = Column(
        UUID(as_uuid=True),
        ForeignKey("dropper_claims.id", ondelete="CASCADE"),
        nullable=False,
    )
    address = Column(VARCHAR(256), nullable=False, index=True)
    amount = Column(BigInteger, nullable=False)
    # String-typed amount alongside the BigInteger one — presumably for values
    # that overflow BigInteger; TODO confirm against callers.
    raw_amount = Column(String, nullable=True)
    added_by = Column(VARCHAR(256), nullable=False, index=True)
    signature = Column(String, nullable=True, index=True)

    # Server-side UTC timestamps; see the utcnow compiled function above.
    created_at = Column(
        DateTime(timezone=True), server_default=utcnow(), nullable=False
    )
    updated_at = Column(
        DateTime(timezone=True),
        server_default=utcnow(),
        onupdate=utcnow(),
        nullable=False,
    )
|
||||
|
||||
|
||||
class RegisteredContract(Base):  # type: ignore
    """A smart contract registered with Engine on behalf of a Moonstream user."""

    __tablename__ = "registered_contracts"
    # A user may register a given (blockchain, address, contract_type) once.
    __table_args__ = (
        UniqueConstraint(
            "blockchain",
            "moonstream_user_id",
            "address",
            "contract_type",
        ),
    )

    # NOTE(review): unlike the sibling tables, this column omits nullable=False;
    # primary keys are implicitly NOT NULL, so behavior is unchanged.
    id = Column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
        unique=True,
    )
    blockchain = Column(VARCHAR(128), nullable=False, index=True)
    address = Column(VARCHAR(256), nullable=False, index=True)
    contract_type = Column(VARCHAR(128), nullable=False, index=True)
    title = Column(VARCHAR(128), nullable=False)
    description = Column(String, nullable=True)
    image_uri = Column(String, nullable=True)
    # User ID of the Moonstream user who registered this contract.
    moonstream_user_id = Column(UUID(as_uuid=True), nullable=False, index=True)

    # Server-side UTC timestamps; see the utcnow compiled function above.
    created_at = Column(
        DateTime(timezone=True), server_default=utcnow(), nullable=False
    )
    updated_at = Column(
        DateTime(timezone=True),
        server_default=utcnow(),
        onupdate=utcnow(),
        nullable=False,
    )
|
||||
|
||||
|
||||
class CallRequest(Base):
    """
    A request for a method call against a registered contract, recorded on
    behalf of a specific caller address.
    """

    __tablename__ = "call_requests"

    # Surrogate primary key; generated client-side via uuid4.
    id = Column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
        unique=True,
        nullable=False,
    )

    # Deleting a registered contract cascades to its call requests.
    registered_contract_id = Column(
        UUID(as_uuid=True),
        ForeignKey("registered_contracts.id", ondelete="CASCADE"),
        nullable=False,
    )
    # Address on whose behalf the call is requested.
    caller = Column(VARCHAR(256), nullable=False, index=True)
    # User ID of the Moonstream user who requested this call.
    # For now, this duplicates the moonstream_user_id in the registered_contracts table. Nevertheless,
    # we keep this column here for auditing purposes. In the future, we will add a group_id column to
    # the registered_contracts table, and this column will be used to track the user from that group
    # who made each call request.
    moonstream_user_id = Column(UUID(as_uuid=True), nullable=False, index=True)
    method = Column(String, nullable=False, index=True)
    # TODO(zomglings): Should we conditional indices on parameters depending on the contract type?
    parameters = Column(JSONB, nullable=False)

    # Optional expiry; NULL means the request does not expire.
    expires_at = Column(DateTime(timezone=True), nullable=True, index=True)

    created_at = Column(
        DateTime(timezone=True), server_default=utcnow(), nullable=False
    )
    # Refreshed on every UPDATE via the onupdate hook.
    updated_at = Column(
        DateTime(timezone=True),
        server_default=utcnow(),
        onupdate=utcnow(),
        nullable=False,
    )
|
||||
|
||||
|
||||
class Leaderboard(Base):  # type: ignore
    """
    A named leaderboard. Individual entries live in leaderboard_scores and
    reference this table by leaderboard_id.
    """

    __tablename__ = "leaderboards"
    # __table_args__ = (UniqueConstraint("dropper_contract_id", "address"),)

    # Surrogate primary key; generated client-side via uuid4.
    id = Column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
        unique=True,
        nullable=False,
    )
    title = Column(VARCHAR(128), nullable=False)
    description = Column(String, nullable=True)
    # Optional link to an external resource; semantics defined by the
    # application layer — not enforced here. TODO confirm against callers.
    resource_id = Column(UUID(as_uuid=True), nullable=True, index=True)
    created_at = Column(
        DateTime(timezone=True), server_default=utcnow(), nullable=False
    )
    # Refreshed on every UPDATE via the onupdate hook.
    updated_at = Column(
        DateTime(timezone=True),
        server_default=utcnow(),
        onupdate=utcnow(),
        nullable=False,
    )
|
||||
|
||||
|
||||
class LeaderboardScores(Base):  # type: ignore
    """
    One score entry per (leaderboard, address) pair — enforced by the unique
    constraint below.
    """

    __tablename__ = "leaderboard_scores"
    __table_args__ = (UniqueConstraint("leaderboard_id", "address"),)

    # Surrogate primary key; generated client-side via uuid4.
    id = Column(
        UUID(as_uuid=True),
        primary_key=True,
        default=uuid.uuid4,
        unique=True,
        nullable=False,
    )
    # Deleting a leaderboard cascades to its scores.
    leaderboard_id = Column(
        UUID(as_uuid=True),
        ForeignKey("leaderboards.id", ondelete="CASCADE"),
        nullable=False,
    )
    address = Column(VARCHAR(256), nullable=False, index=True)
    score = Column(BigInteger, nullable=False)
    # Free-form JSON breakdown of how the score was earned — schema is defined
    # by the application layer, not enforced here.
    points_data = Column(JSONB, nullable=True)
    created_at = Column(
        DateTime(timezone=True), server_default=utcnow(), nullable=False
    )
    # Refreshed on every UPDATE via the onupdate hook.
    updated_at = Column(
        DateTime(timezone=True),
        server_default=utcnow(),
        onupdate=utcnow(),
        nullable=False,
    )
|
|
@ -0,0 +1,520 @@
|
|||
"""
|
||||
Moonstream Engine Admin API.
|
||||
"""
|
||||
import logging
|
||||
from typing import Optional, Any, Dict
|
||||
from uuid import UUID
|
||||
|
||||
from web3 import Web3
|
||||
from fastapi import Body, FastAPI, Request, Depends, Query
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
|
||||
from .. import actions
|
||||
from .. import data
|
||||
from .. import db
|
||||
from ..middleware import EngineHTTPException, EngineAuthMiddleware
|
||||
from ..settings import DOCS_TARGET_PATH, ORIGINS
|
||||
from ..version import VERSION
|
||||
|
||||
|
||||
# Module-level application setup for the admin API.
# NOTE: logging.basicConfig at import time configures the root logger as a
# side effect of importing this module (preserved from the original).
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

tags_metadata = [{"name": "admin", "description": "Moonstream Engine Admin API"}]

# Paths that bypass EngineAuthMiddleware (API documentation only).
whitelist_paths: Dict[str, str] = {
    "/admin/docs": "GET",
    "/admin/openapi.json": "GET",
}

app = FastAPI(
    # Fixed: these were f-strings with no placeholders; plain literals are
    # equivalent and clearer.
    title="Moonstream Engine Admin API",
    description="Moonstream Engine Admin API endpoints.",
    version=VERSION,
    openapi_tags=tags_metadata,
    openapi_url="/openapi.json",
    docs_url=None,
    redoc_url=f"/{DOCS_TARGET_PATH}",
)


app.add_middleware(EngineAuthMiddleware, whitelist=whitelist_paths)

app.add_middleware(
    CORSMiddleware,
    allow_origins=ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
||||
|
||||
|
||||
@app.get("/drops", response_model=data.DropListResponse)
async def get_drop_list_handler(
    request: Request,
    blockchain: str,
    contract_address: str,
    drop_number: Optional[int] = Query(None),
    terminus_address: Optional[str] = Query(None),
    terminus_pool_id: Optional[int] = Query(None),
    active: Optional[bool] = Query(None),
    limit: int = 20,
    offset: int = 0,
    db_session: Session = Depends(db.yield_db_session),
) -> data.DropListResponse:
    """
    Get list of drops for a given dropper contract and drop number.

    Raises 404 when no drops match, 500 on unexpected errors.
    """
    contract_address = Web3.toChecksumAddress(contract_address)

    # NOTE(review): admin-token authorization appears intentionally disabled
    # here — confirm before re-enabling.
    # try:
    #     actions.ensure_contract_admin_token_holder(
    #         blockchain, contract_address, request.state.address
    #     )
    # except actions.AuthorizationError as e:
    #     logger.error(e)
    #     raise EngineHTTPException(status_code=403)
    # except NoResultFound:
    #     raise EngineHTTPException(status_code=404, detail="Drop not found")

    if terminus_address:
        terminus_address = Web3.toChecksumAddress(terminus_address)

    try:
        results = actions.get_drops(
            db_session=db_session,
            dropper_contract_address=contract_address,
            blockchain=blockchain,
            drop_number=drop_number,
            terminus_address=terminus_address,
            terminus_pool_id=terminus_pool_id,
            active=active,
            limit=limit,
            offset=offset,
        )
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="No drops found.")
    except Exception as e:
        logger.error(f"Can't get drops. Failed with error: {e}")
        # Fixed: detail previously said "Can't get claims" in the drops handler.
        raise EngineHTTPException(status_code=500, detail="Can't get drops")

    # list(results) replaces the redundant [result for result in results].
    return data.DropListResponse(drops=list(results))
|
||||
|
||||
|
||||
@app.post("/drops", response_model=data.DropCreatedResponse)
async def create_drop(
    request: Request,
    register_request: data.DropRegisterRequest = Body(...),
    db_session: Session = Depends(db.yield_db_session),
) -> data.DropCreatedResponse:
    """
    Create a drop for a given dropper contract.

    Only the owner of the dropper contract may create drops on it (403
    otherwise); 404 when the contract does not exist.
    """
    # Authorization: caller must own the target dropper contract.
    try:
        actions.ensure_dropper_contract_owner(
            db_session, register_request.dropper_contract_id, request.state.address
        )
    except actions.AuthorizationError as auth_err:
        logger.error(auth_err)
        raise EngineHTTPException(status_code=403)
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Dropper contract not found")
    except Exception as unexpected_err:
        logger.error(unexpected_err)
        raise EngineHTTPException(status_code=500)

    # Normalize the terminus address to its checksummed form before storing.
    raw_terminus = register_request.terminus_address
    if raw_terminus:
        register_request.terminus_address = Web3.toChecksumAddress(raw_terminus)

    try:
        claim = actions.create_claim(
            db_session=db_session,
            dropper_contract_id=register_request.dropper_contract_id,
            title=register_request.title,
            description=register_request.description,
            claim_block_deadline=register_request.claim_block_deadline,
            terminus_address=register_request.terminus_address,
            terminus_pool_id=register_request.terminus_pool_id,
            claim_id=register_request.claim_id,
        )
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Dropper contract not found")
    except Exception as e:
        logger.error(f"Can't create claim: {e}")
        raise EngineHTTPException(status_code=500, detail="Can't create claim")

    return data.DropCreatedResponse(
        dropper_claim_id=claim.id,
        dropper_contract_id=claim.dropper_contract_id,
        title=claim.title,
        description=claim.description,
        claim_block_deadline=claim.claim_block_deadline,
        terminus_address=claim.terminus_address,
        terminus_pool_id=claim.terminus_pool_id,
        claim_id=claim.claim_id,
    )
|
||||
|
||||
|
||||
@app.put(
    "/drops/{dropper_claim_id}/activate",
    response_model=data.DropUpdatedResponse,
)
async def activate_drop(
    request: Request,
    dropper_claim_id: UUID,
    db_session: Session = Depends(db.yield_db_session),
) -> data.DropUpdatedResponse:
    """
    Activate a given drop by drop id.

    Requires the caller to hold the admin token for the drop (403 otherwise);
    404 when the drop does not exist.
    """
    # Authorization gate: caller must hold the drop's admin token.
    try:
        actions.ensure_admin_token_holder(
            db_session, dropper_claim_id, request.state.address
        )
    except actions.AuthorizationError as auth_err:
        logger.error(auth_err)
        raise EngineHTTPException(status_code=403)
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Drop not found")

    try:
        activated = actions.activate_drop(
            db_session=db_session,
            dropper_claim_id=dropper_claim_id,
        )
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Drop not found")
    except Exception as e:
        logger.error(f"Can't activate drop: {e}")
        raise EngineHTTPException(status_code=500, detail="Can't activate drop")

    return data.DropUpdatedResponse(
        dropper_claim_id=activated.id,
        dropper_contract_id=activated.dropper_contract_id,
        title=activated.title,
        description=activated.description,
        claim_block_deadline=activated.claim_block_deadline,
        terminus_address=activated.terminus_address,
        terminus_pool_id=activated.terminus_pool_id,
        claim_id=activated.claim_id,
        active=activated.active,
    )
|
||||
|
||||
|
||||
@app.put(
    "/drops/{dropper_claim_id}/deactivate",
    response_model=data.DropUpdatedResponse,
)
async def deactivate_drop(
    request: Request,
    dropper_claim_id: UUID,
    db_session: Session = Depends(db.yield_db_session),
) -> data.DropUpdatedResponse:
    """
    Deactivate a given drop by drop id.

    Requires the caller to hold the admin token for the drop (403 otherwise);
    404 when the drop does not exist.
    """
    try:
        actions.ensure_admin_token_holder(
            db_session, dropper_claim_id, request.state.address
        )
    except actions.AuthorizationError as e:
        logger.error(e)
        raise EngineHTTPException(status_code=403)
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Drop not found")

    try:
        drop = actions.deactivate_drop(
            db_session=db_session,
            dropper_claim_id=dropper_claim_id,
        )
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Drop not found")
    except Exception as e:
        # Fixed: docstring, log message and detail previously said "activate"
        # in this deactivate handler (copy-paste error).
        logger.error(f"Can't deactivate drop: {e}")
        raise EngineHTTPException(status_code=500, detail="Can't deactivate drop")

    return data.DropUpdatedResponse(
        dropper_claim_id=drop.id,
        dropper_contract_id=drop.dropper_contract_id,
        title=drop.title,
        description=drop.description,
        claim_block_deadline=drop.claim_block_deadline,
        terminus_address=drop.terminus_address,
        terminus_pool_id=drop.terminus_pool_id,
        claim_id=drop.claim_id,
        active=drop.active,
    )
|
||||
|
||||
|
||||
@app.patch("/drops/{dropper_claim_id}", response_model=data.DropUpdatedResponse)
async def update_drop(
    request: Request,
    dropper_claim_id: UUID,
    update_request: data.DropUpdateRequest = Body(...),
    db_session: Session = Depends(db.yield_db_session),
) -> data.DropUpdatedResponse:
    """
    Update a given drop by drop id.

    Requires the caller to hold the admin token for the drop (403 otherwise);
    404 when the drop does not exist.
    """
    # Authorization gate: caller must hold the drop's admin token.
    try:
        actions.ensure_admin_token_holder(
            db_session, dropper_claim_id, request.state.address
        )
    except actions.AuthorizationError as auth_err:
        logger.error(auth_err)
        raise EngineHTTPException(status_code=403)
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Drop not found")

    try:
        updated = actions.update_drop(
            db_session=db_session,
            dropper_claim_id=dropper_claim_id,
            title=update_request.title,
            description=update_request.description,
            claim_block_deadline=update_request.claim_block_deadline,
            terminus_address=update_request.terminus_address,
            terminus_pool_id=update_request.terminus_pool_id,
            claim_id=update_request.claim_id,
            address=request.state.address,
        )
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Drop not found")
    except Exception as e:
        logger.error(f"Can't update drop: {e}")
        raise EngineHTTPException(status_code=500, detail="Can't update drop")

    return data.DropUpdatedResponse(
        dropper_claim_id=updated.id,
        dropper_contract_id=updated.dropper_contract_id,
        title=updated.title,
        description=updated.description,
        claim_block_deadline=updated.claim_block_deadline,
        terminus_address=updated.terminus_address,
        terminus_pool_id=updated.terminus_pool_id,
        claim_id=updated.claim_id,
        active=updated.active,
    )
|
||||
|
||||
|
||||
@app.get("/drops/{dropper_claim_id}/claimants", response_model=data.ClaimantsResponse)
async def get_claimants(
    request: Request,
    dropper_claim_id: UUID,
    amount: Optional[int] = None,
    added_by: Optional[str] = None,
    address: Optional[str] = None,
    limit: int = 10,
    offset: int = 0,
    db_session: Session = Depends(db.yield_db_session),
) -> data.ClaimantsResponse:
    # Fixed: return annotation previously said DropListResponse, but the
    # handler builds and the route declares ClaimantsResponse.
    """
    Get list of claimants for a given dropper contract.

    Optional filters: amount, added_by, address. Requires the caller to hold
    the admin token for the drop (403 otherwise).
    """
    if address:
        address = Web3.toChecksumAddress(address)

    try:
        actions.ensure_admin_token_holder(
            db_session, dropper_claim_id, request.state.address
        )
    except actions.AuthorizationError as e:
        logger.error(e)
        raise EngineHTTPException(status_code=403)
    except Exception as e:
        logger.error(e)
        raise EngineHTTPException(status_code=500)

    try:
        results = actions.get_claimants(
            db_session=db_session,
            dropper_claim_id=dropper_claim_id,
            amount=amount,
            added_by=added_by,
            address=address,
            limit=limit,
            offset=offset,
        )
    except Exception as e:
        # Fixed: messages previously said "add claimants" in this GET handler.
        logger.info(f"Can't get claimants for claim {dropper_claim_id} with error: {e}")
        raise EngineHTTPException(status_code=500, detail="Error getting claimants")

    return data.ClaimantsResponse(claimants=list(results))
|
||||
|
||||
|
||||
@app.post(
    "/drops/{dropper_claim_id}/claimants/batch", response_model=data.ClaimantsResponse
)
async def add_claimants(
    request: Request,
    dropper_claim_id: UUID,
    claimants_list: data.BatchAddClaimantsRequest = Body(...),
    db_session: Session = Depends(db.yield_db_session),
) -> data.ClaimantsResponse:
    """
    Add addresses to particular claim.

    Requires the caller to hold the admin token for the drop (403 otherwise);
    400 when the request contains duplicate claimants.
    """
    try:
        actions.ensure_admin_token_holder(
            db_session, dropper_claim_id, request.state.address
        )
    except actions.AuthorizationError as e:
        logger.error(e)
        raise EngineHTTPException(status_code=403)
    except Exception as e:
        logger.error(e)
        raise EngineHTTPException(status_code=500)

    try:
        results = actions.add_claimants(
            db_session=db_session,
            dropper_claim_id=dropper_claim_id,
            claimants=claimants_list.claimants,
            added_by=request.state.address,
        )
    # NOTE: exception class name ("Dublicate") is defined in the project's
    # actions module and cannot be renamed here without breaking callers.
    except actions.DublicateClaimantError:
        raise EngineHTTPException(
            status_code=400,
            # Fixed: user-facing typo "Dublicated ... please deduplicate".
            detail="Duplicate claimants in request, please deduplicate them",
        )
    except Exception as e:
        logger.info(f"Can't add claimants for claim {dropper_claim_id} with error: {e}")
        # Fixed: dropped pointless f-prefix (no placeholders).
        raise EngineHTTPException(status_code=500, detail="Error adding claimants")

    return data.ClaimantsResponse(claimants=results)
|
||||
|
||||
|
||||
@app.delete(
    "/drops/{dropper_claim_id}/claimants", response_model=data.RemoveClaimantsResponse
)
async def delete_claimants(
    request: Request,
    dropper_claim_id: UUID,
    claimants_list: data.BatchRemoveClaimantsRequest = Body(...),
    db_session: Session = Depends(db.yield_db_session),
) -> data.RemoveClaimantsResponse:
    """
    Remove addresses from a particular claim.

    Requires the caller to hold the admin token for the drop (403 otherwise).
    """
    try:
        actions.ensure_admin_token_holder(
            db_session,
            dropper_claim_id,
            request.state.address,
        )
    except actions.AuthorizationError as e:
        logger.error(e)
        raise EngineHTTPException(status_code=403)
    except Exception as e:
        logger.error(e)
        raise EngineHTTPException(status_code=500)

    try:
        results = actions.delete_claimants(
            db_session=db_session,
            dropper_claim_id=dropper_claim_id,
            addresses=claimants_list.claimants,
        )
    except Exception as e:
        logger.info(
            f"Can't remove claimants for claim {dropper_claim_id} with error: {e}"
        )
        # Fixed: dropped pointless f-prefix (no placeholders).
        raise EngineHTTPException(status_code=500, detail="Error removing claimants")

    return data.RemoveClaimantsResponse(addresses=results)
|
||||
|
||||
|
||||
@app.get("/drops/{dropper_claim_id}/claimants/search", response_model=data.Claimant)
async def get_claimant_in_drop(
    request: Request,
    dropper_claim_id: UUID,
    address: str,
    db_session: Session = Depends(db.yield_db_session),
) -> data.Claimant:
    """
    Return claimant from drop.

    Requires the caller to hold the admin token for the drop (403 otherwise);
    404 when the address is not a claimant of the drop.
    """
    # Authorization gate: caller must hold the drop's admin token.
    try:
        actions.ensure_admin_token_holder(
            db_session,
            dropper_claim_id,
            request.state.address,
        )
    except actions.AuthorizationError as auth_err:
        logger.error(auth_err)
        raise EngineHTTPException(status_code=403)
    except Exception as unexpected_err:
        logger.error(unexpected_err)
        raise EngineHTTPException(status_code=500)

    try:
        found = actions.get_claimant(
            db_session=db_session,
            dropper_claim_id=dropper_claim_id,
            address=address,
        )
    except NoResultFound:
        raise EngineHTTPException(
            status_code=404, detail="Address not present in that drop."
        )
    except Exception as e:
        logger.error(f"Can't get claimant: {e}")
        raise EngineHTTPException(status_code=500, detail="Can't get claimant")

    return data.Claimant(
        address=found.address, amount=found.amount, raw_amount=found.raw_amount
    )
|
||||
|
||||
|
||||
@app.post("/drop/{dropper_claim_id}/refetch")
async def refetch_drop_signatures(
    request: Request,
    dropper_claim_id: UUID,
    db_session: Session = Depends(db.yield_db_session),
) -> Any:
    """
    Refetch signatures for a drop.

    Requires the caller to hold the admin token for the drop (403 otherwise).
    Returns whatever actions.refetch_drop_signatures produces — the response
    shape is not pinned by a response_model.
    """
    try:
        actions.ensure_admin_token_holder(
            db_session, dropper_claim_id, request.state.address
        )
    except actions.AuthorizationError as e:
        logger.error(e)
        raise EngineHTTPException(status_code=403)
    except Exception as e:
        logger.error(e)
        raise EngineHTTPException(status_code=500)

    try:
        signatures = actions.refetch_drop_signatures(
            db_session=db_session, dropper_claim_id=dropper_claim_id
        )
    except Exception as e:
        logger.info(
            f"Can't refetch signatures for drop {dropper_claim_id} with error: {e}"
        )
        # Fixed: dropped pointless f-prefix (no placeholders).
        raise EngineHTTPException(
            status_code=500, detail="Error refetching signatures"
        )

    return signatures
|
|
@ -0,0 +1,906 @@
|
|||
"""
|
||||
Lootbox API.
|
||||
"""
|
||||
import logging
|
||||
from typing import List, Optional, Any, Dict
|
||||
from uuid import UUID
|
||||
|
||||
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi import FastAPI, Body, Request, Depends, Query
|
||||
from hexbytes import HexBytes
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
from web3 import Web3
|
||||
|
||||
from engineapi.models import DropperClaimant
|
||||
|
||||
from .. import actions
|
||||
from ..contracts import Dropper_interface
|
||||
from .. import data
|
||||
from .. import db
|
||||
from .. import signatures
|
||||
from ..middleware import EngineHTTPException, EngineAuthMiddleware
|
||||
from ..settings import (
|
||||
ORIGINS,
|
||||
DOCS_TARGET_PATH,
|
||||
BLOCKCHAIN_WEB3_PROVIDERS,
|
||||
UNSUPPORTED_BLOCKCHAIN_ERROR_MESSAGE,
|
||||
)
|
||||
from ..version import VERSION
|
||||
|
||||
|
||||
# Module-level application setup for the legacy drops ("lootbox") API.
logger = logging.getLogger(__name__)


tags_metadata = [{"name": "dropper", "description": "Moonstream Engine old drops API"}]


# Paths that bypass EngineAuthMiddleware (public, read-only endpoints and docs).
whitelist_paths: Dict[str, str] = {
    "/drops": "GET",
    "/drops/batch": "GET",
    "/drops/claims": "GET",
    "/drops/contracts": "GET",
    "/drops/docs": "GET",
    "/drops/terminus": "GET",
    "/drops/blockchains": "GET",
    "/drops/terminus/claims": "GET",
    "/drops/openapi.json": "GET",
}

app = FastAPI(
    # Fixed: these were f-strings with no placeholders; plain literals are
    # equivalent and clearer.
    title="Moonstream Engine old drops API",
    description="Moonstream Engine old drops API endpoints.",
    version=VERSION,
    openapi_tags=tags_metadata,
    openapi_url="/openapi.json",
    docs_url=None,
    redoc_url=f"/{DOCS_TARGET_PATH}",
)


app.add_middleware(EngineAuthMiddleware, whitelist=whitelist_paths)

app.add_middleware(
    CORSMiddleware,
    allow_origins=ORIGINS,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
||||
|
||||
|
||||
# TODO(zomglings): Take blockchain as a parameter (perhaps optional) here. Browser-based workflow is that
|
||||
# user would already have selected their blockchain when connecting Metamask.
|
||||
# TODO(zomglings): Take blockchain as a parameter (perhaps optional) here. Browser-based workflow is that
# user would already have selected their blockchain when connecting Metamask.
@app.get("", response_model=data.DropResponse)
@app.get("/", response_model=data.DropResponse)
async def get_drop_handler(
    dropper_claim_id: UUID,
    address: str,
    db_session: Session = Depends(db.yield_db_session),
) -> data.DropResponse:
    """
    Get signed transaction for user with the given address.

    Looks up the claimant record for (drop, address), validates that the
    claim is active and has a block deadline, then returns a signed claim
    message. A missing or stale signature is re-generated via the signing
    service and persisted back to the claimant row.
    """

    address = Web3.toChecksumAddress(address)

    # A missing claimant record means this address has no claim on the drop;
    # surfaced as 403 rather than 404.
    try:
        claimant = actions.get_claimant(db_session, dropper_claim_id, address)
    except NoResultFound:
        raise EngineHTTPException(
            status_code=403, detail="You are not authorized to claim that reward"
        )
    except Exception as e:
        # NOTE(review): the exception is swallowed without logging here —
        # consider logging e for diagnosability.
        raise EngineHTTPException(status_code=500, detail="Can't get claimant")

    # Fetch the ORM row separately so a freshly generated signature can be
    # persisted below (the actions.get_claimant result is read-only here —
    # presumably a row/DTO; confirm against actions module).
    try:
        claimant_db_object = (
            db_session.query(DropperClaimant)
            .filter(DropperClaimant.id == claimant.dropper_claimant_id)
            .one()
        )
    except Exception as err:
        logger.error(
            f"Can't get claimant object for drop: {dropper_claim_id} and address: {address}"
        )
        raise EngineHTTPException(status_code=500, detail="Can't get claimant object.")

    if not claimant.active:
        raise EngineHTTPException(
            status_code=403, detail="Cannot claim rewards for an inactive claim"
        )

    # If block deadline has already been exceeded - the contract (or frontend) will handle it.
    if claimant.claim_block_deadline is None:
        raise EngineHTTPException(
            status_code=403,
            detail="Cannot claim rewards for a claim with no block deadline",
        )

    # Prefer the stored raw amount; otherwise derive it from the nominal amount.
    transformed_amount = claimant.raw_amount
    if transformed_amount is None:
        transformed_amount = actions.transform_claim_amount(
            db_session, dropper_claim_id, claimant.amount
        )

    # Reuse the cached signature when it is still recent; otherwise sign the
    # on-chain claim message hash and persist the new signature.
    signature = claimant.signature
    if signature is None or not claimant.is_recent_signature:
        dropper_contract = Dropper_interface.Contract(
            BLOCKCHAIN_WEB3_PROVIDERS[claimant.blockchain],
            claimant.dropper_contract_address,
        )
        # The contract itself computes the message hash to be signed.
        message_hash_raw = dropper_contract.claimMessageHash(
            claimant.claim_id,
            claimant.address,
            claimant.claim_block_deadline,
            int(transformed_amount),
        ).call()

        message_hash = HexBytes(message_hash_raw).hex()

        try:
            signature = signatures.DROP_SIGNER.sign_message(message_hash)
            claimant_db_object.signature = signature
            db_session.commit()
        except signatures.AWSDescribeInstancesFail:
            raise EngineHTTPException(status_code=500)
        except signatures.SignWithInstanceFail:
            raise EngineHTTPException(status_code=500)
        except Exception as err:
            logger.error(f"Unexpected error in signing message process: {err}")
            raise EngineHTTPException(status_code=500)

    return data.DropResponse(
        claimant=claimant.address,
        amount=str(transformed_amount),
        claim_id=claimant.claim_id,
        block_deadline=claimant.claim_block_deadline,
        signature=signature,
        title=claimant.title,
        description=claimant.description,
    )
|
||||
|
||||
|
||||
@app.get("/batch", response_model=List[data.DropBatchResponseItem])
async def get_drop_batch_handler(
    blockchain: str,
    address: str,
    limit: int = 10,
    offset: int = 0,
    current_block_number: Optional[int] = Query(None),
    db_session: Session = Depends(db.yield_db_session),
) -> List[data.DropBatchResponseItem]:
    """
    Get signed transaction for all user drops.

    Batch variant of the single-drop handler: fetches all claimant drops for
    the address on the given blockchain, re-signs any missing/stale
    signatures, and commits updated signatures in a single transaction at the
    end.
    """
    if blockchain not in BLOCKCHAIN_WEB3_PROVIDERS:
        raise EngineHTTPException(
            status_code=404, detail=UNSUPPORTED_BLOCKCHAIN_ERROR_MESSAGE
        )

    address = Web3.toChecksumAddress(address)

    try:
        claimant_drops = actions.get_claimant_drops(
            db_session, blockchain, address, current_block_number, limit, offset
        )
    except NoResultFound:
        raise EngineHTTPException(
            status_code=403, detail="You are not authorized to claim that reward"
        )
    except Exception as e:
        logger.error(e)
        raise EngineHTTPException(status_code=500, detail="Can't get claimant")

    # get claimants
    # Load the matching ORM rows in one query so new signatures can be
    # persisted on them below.
    try:
        claimants = (
            db_session.query(DropperClaimant)
            .filter(
                DropperClaimant.id.in_(
                    [item.dropper_claimant_id for item in claimant_drops]
                )
            )
            .all()
        )
    except Exception as err:
        logger.error(f"Can't get claimant objects for address: {address}")
        raise EngineHTTPException(status_code=500, detail="Can't get claimant objects.")

    # Index ORM rows by id for O(1) lookup inside the loop.
    claimants_dict = {item.id: item for item in claimants}

    # generate list of claims

    claims: List[data.DropBatchResponseItem] = []

    # Commit once at the end, only if at least one signature was regenerated.
    commit_required = False

    for claimant_drop in claimant_drops:

        # Prefer the stored raw amount; otherwise derive it per drop.
        transformed_amount = claimant_drop.raw_amount

        if transformed_amount is None:

            transformed_amount = actions.transform_claim_amount(
                db_session, claimant_drop.dropper_claim_id, claimant_drop.amount
            )

        # Reuse the cached signature when still recent; otherwise re-sign.
        signature = claimant_drop.signature
        if signature is None or not claimant_drop.is_recent_signature:
            dropper_contract = Dropper_interface.Contract(
                BLOCKCHAIN_WEB3_PROVIDERS[blockchain],
                claimant_drop.dropper_contract_address,
            )

            # The contract itself computes the message hash to be signed.
            message_hash_raw = dropper_contract.claimMessageHash(
                claimant_drop.claim_id,
                claimant_drop.address,
                claimant_drop.claim_block_deadline,
                int(transformed_amount),
            ).call()

            message_hash = HexBytes(message_hash_raw).hex()

            try:
                signature = signatures.DROP_SIGNER.sign_message(message_hash)
                claimants_dict[claimant_drop.dropper_claimant_id].signature = signature
                commit_required = True
            except signatures.AWSDescribeInstancesFail:
                raise EngineHTTPException(status_code=500)
            except signatures.SignWithInstanceFail:
                raise EngineHTTPException(status_code=500)
            except Exception as err:
                logger.error(f"Unexpected error in signing message process: {err}")
                raise EngineHTTPException(status_code=500)

        claims.append(
            data.DropBatchResponseItem(
                claimant=claimant_drop.address,
                amount=int(transformed_amount),
                amount_string=str(transformed_amount),
                claim_id=claimant_drop.claim_id,
                block_deadline=claimant_drop.claim_block_deadline,
                signature=signature,
                dropper_claim_id=claimant_drop.dropper_claim_id,
                dropper_contract_address=claimant_drop.dropper_contract_address,
                blockchain=claimant_drop.blockchain,
                active=claimant_drop.active,
                title=claimant_drop.title,
                description=claimant_drop.description,
            )
        )

    if commit_required:
        db_session.commit()

    return claims
|
||||
|
||||
|
||||
@app.get("/blockchains")
async def get_drops_blockchains_handler(
    db_session: Session = Depends(db.yield_db_session),
) -> List[data.DropperBlockchainResponse]:
    """
    Get list of blockchains.

    Returns the distinct blockchains that have drops registered; 404 when
    there are none.
    """
    try:
        results = actions.list_drops_blockchains(db_session=db_session)
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="No drops found.")
    except Exception as e:
        logger.error(f"Can't get list of drops end with error: {e}")
        raise EngineHTTPException(status_code=500, detail="Can't get drops")

    return [
        data.DropperBlockchainResponse(blockchain=row.blockchain) for row in results
    ]
|
||||
|
||||
|
||||
@app.get("/contracts", response_model=List[data.DropperContractResponse])
async def get_dropper_contracts_handler(
    blockchain: Optional[str] = Query(None),
    db_session: Session = Depends(db.yield_db_session),
) -> List[data.DropperContractResponse]:
    """
    Get list of drops for a given dropper contract.

    Optionally filtered by blockchain; 404 when no contracts are registered.
    """
    try:
        results = actions.list_dropper_contracts(
            db_session=db_session, blockchain=blockchain
        )
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="No drops found.")
    except Exception as e:
        logger.error(f"Can't get list of dropper contracts end with error: {e}")
        raise EngineHTTPException(status_code=500, detail="Can't get contracts")

    return [
        data.DropperContractResponse(
            id=row.id,
            blockchain=row.blockchain,
            address=row.address,
            title=row.title,
            description=row.description,
            image_uri=row.image_uri,
        )
        for row in results
    ]
|
||||
|
||||
|
||||
@app.get("/terminus")
|
||||
async def get_drops_terminus_handler(
|
||||
blockchain: str = Query(None),
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> List[data.DropperTerminusResponse]:
|
||||
|
||||
"""
|
||||
Return distinct terminus pools
|
||||
"""
|
||||
|
||||
try:
|
||||
results = actions.list_drops_terminus(
|
||||
db_session=db_session, blockchain=blockchain
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Can't get list of terminus contracts end with error: {e}")
|
||||
raise EngineHTTPException(
|
||||
status_code=500, detail="Can't get terminus contracts"
|
||||
)
|
||||
|
||||
response = [
|
||||
data.DropperTerminusResponse(
|
||||
terminus_address=result.terminus_address,
|
||||
terminus_pool_id=result.terminus_pool_id,
|
||||
blockchain=result.blockchain,
|
||||
)
|
||||
for result in results
|
||||
]
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@app.get("/claims", response_model=data.DropListResponse)
|
||||
async def get_drop_list_handler(
|
||||
blockchain: str,
|
||||
claimant_address: str,
|
||||
dropper_contract_address: Optional[str] = Query(None),
|
||||
terminus_address: Optional[str] = Query(None),
|
||||
terminus_pool_id: Optional[int] = Query(None),
|
||||
active: Optional[bool] = Query(None),
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.DropListResponse:
|
||||
"""
|
||||
Get list of drops for a given dropper contract and claimant address.
|
||||
"""
|
||||
|
||||
if dropper_contract_address:
|
||||
dropper_contract_address = Web3.toChecksumAddress(dropper_contract_address)
|
||||
|
||||
if claimant_address:
|
||||
claimant_address = Web3.toChecksumAddress(claimant_address)
|
||||
|
||||
if terminus_address:
|
||||
terminus_address = Web3.toChecksumAddress(terminus_address)
|
||||
|
||||
try:
|
||||
results = actions.get_claims(
|
||||
db_session=db_session,
|
||||
dropper_contract_address=dropper_contract_address,
|
||||
blockchain=blockchain,
|
||||
claimant_address=claimant_address,
|
||||
terminus_address=terminus_address,
|
||||
terminus_pool_id=terminus_pool_id,
|
||||
active=active,
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="No drops found.")
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Can't get claims for user {claimant_address} end with error: {e}"
|
||||
)
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get claims")
|
||||
|
||||
return data.DropListResponse(drops=[result for result in results])
|
||||
|
||||
|
||||
@app.get("/claims/{dropper_claim_id}", response_model=data.DropperClaimResponse)
|
||||
async def get_drop_handler(
|
||||
request: Request,
|
||||
dropper_claim_id: str,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.DropperClaimResponse:
|
||||
"""
|
||||
Get list of drops for a given dropper contract and claimant address.
|
||||
"""
|
||||
|
||||
try:
|
||||
drop = actions.get_drop(
|
||||
db_session=db_session, dropper_claim_id=dropper_claim_id
|
||||
)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="No drops found.")
|
||||
except Exception as e:
|
||||
logger.error(f"Can't get drop {dropper_claim_id} end with error: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get drop")
|
||||
|
||||
if drop.terminus_address is not None and drop.terminus_pool_id is not None:
|
||||
try:
|
||||
actions.ensure_admin_token_holder(
|
||||
db_session, dropper_claim_id, request.state.address
|
||||
)
|
||||
except actions.AuthorizationError as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=403)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="Drop not found")
|
||||
|
||||
return data.DropperClaimResponse(
|
||||
id=drop.id,
|
||||
dropper_contract_id=drop.dropper_contract_id,
|
||||
title=drop.title,
|
||||
description=drop.description,
|
||||
active=drop.active,
|
||||
claim_block_deadline=drop.claim_block_deadline,
|
||||
terminus_address=drop.terminus_address,
|
||||
terminus_pool_id=drop.terminus_pool_id,
|
||||
claim_id=drop.claim_id,
|
||||
)
|
||||
|
||||
|
||||
@app.get("/terminus/claims", response_model=data.DropListResponse)
|
||||
async def get_drop_terminus_list_handler(
|
||||
blockchain: str,
|
||||
terminus_address: str,
|
||||
terminus_pool_id: int,
|
||||
dropper_contract_address: Optional[str] = Query(None),
|
||||
active: Optional[bool] = Query(None),
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.DropListResponse:
|
||||
"""
|
||||
Get list of drops for a given terminus address.
|
||||
"""
|
||||
|
||||
if dropper_contract_address:
|
||||
dropper_contract_address = Web3.toChecksumAddress(dropper_contract_address)
|
||||
|
||||
terminus_address = Web3.toChecksumAddress(terminus_address)
|
||||
|
||||
try:
|
||||
results = actions.get_terminus_claims(
|
||||
db_session=db_session,
|
||||
dropper_contract_address=dropper_contract_address,
|
||||
blockchain=blockchain,
|
||||
terminus_address=terminus_address,
|
||||
terminus_pool_id=terminus_pool_id,
|
||||
active=active,
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="No drops found.")
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Can't get Terminus claims (blockchain={blockchain}, address={terminus_address}, pool_id={terminus_pool_id}): {e}"
|
||||
)
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get claims")
|
||||
|
||||
return data.DropListResponse(drops=[result for result in results])
|
||||
|
||||
|
||||
@app.post("/claims", response_model=data.DropCreatedResponse)
|
||||
async def create_drop(
|
||||
request: Request,
|
||||
register_request: data.DropRegisterRequest = Body(...),
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.DropCreatedResponse:
|
||||
|
||||
"""
|
||||
Create a drop for a given dropper contract.
|
||||
"""
|
||||
try:
|
||||
actions.ensure_dropper_contract_owner(
|
||||
db_session, register_request.dropper_contract_id, request.state.address
|
||||
)
|
||||
except actions.AuthorizationError as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=403)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="Dropper contract not found")
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
if register_request.terminus_address:
|
||||
register_request.terminus_address = Web3.toChecksumAddress(
|
||||
register_request.terminus_address
|
||||
)
|
||||
|
||||
try:
|
||||
claim = actions.create_claim(
|
||||
db_session=db_session,
|
||||
dropper_contract_id=register_request.dropper_contract_id,
|
||||
title=register_request.title,
|
||||
description=register_request.description,
|
||||
claim_block_deadline=register_request.claim_block_deadline,
|
||||
terminus_address=register_request.terminus_address,
|
||||
terminus_pool_id=register_request.terminus_pool_id,
|
||||
claim_id=register_request.claim_id,
|
||||
)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="Dropper contract not found")
|
||||
except Exception as e:
|
||||
logger.error(f"Can't create claim: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Can't create claim")
|
||||
|
||||
return data.DropCreatedResponse(
|
||||
dropper_claim_id=claim.id,
|
||||
dropper_contract_id=claim.dropper_contract_id,
|
||||
title=claim.title,
|
||||
description=claim.description,
|
||||
claim_block_deadline=claim.claim_block_deadline,
|
||||
terminus_address=claim.terminus_address,
|
||||
terminus_pool_id=claim.terminus_pool_id,
|
||||
claim_id=claim.claim_id,
|
||||
)
|
||||
|
||||
|
||||
@app.put(
    "/claims/{dropper_claim_id}/activate",
    response_model=data.DropUpdatedResponse,
)
async def activate_drop(
    request: Request,
    dropper_claim_id: UUID,
    db_session: Session = Depends(db.yield_db_session),
) -> data.DropUpdatedResponse:
    """
    Activate a given drop by drop id.

    Requires the caller to hold the drop's admin token; raises 403
    otherwise, 404 when the drop does not exist.
    """
    try:
        actions.ensure_admin_token_holder(
            db_session, dropper_claim_id, request.state.address
        )
    except actions.AuthorizationError as e:
        logger.error(e)
        raise EngineHTTPException(status_code=403)
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Drop not found")

    try:
        drop = actions.activate_drop(
            db_session=db_session,
            dropper_claim_id=dropper_claim_id,
        )
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Drop not found")
    except Exception as e:
        logger.error(f"Can't activate drop: {e}")
        raise EngineHTTPException(status_code=500, detail="Can't activate drop")

    return data.DropUpdatedResponse(
        dropper_claim_id=drop.id,
        dropper_contract_id=drop.dropper_contract_id,
        title=drop.title,
        description=drop.description,
        claim_block_deadline=drop.claim_block_deadline,
        terminus_address=drop.terminus_address,
        terminus_pool_id=drop.terminus_pool_id,
        claim_id=drop.claim_id,
        active=drop.active,
    )
|
||||
|
||||
|
||||
@app.put(
    "/claims/{dropper_claim_id}/deactivate",
    response_model=data.DropUpdatedResponse,
)
async def deactivate_drop(
    request: Request,
    dropper_claim_id: UUID,
    db_session: Session = Depends(db.yield_db_session),
) -> data.DropUpdatedResponse:
    """
    Deactivate a given drop by drop id.

    Requires the caller to hold the drop's admin token; raises 403
    otherwise, 404 when the drop does not exist.
    """
    try:
        actions.ensure_admin_token_holder(
            db_session, dropper_claim_id, request.state.address
        )
    except actions.AuthorizationError as e:
        logger.error(e)
        raise EngineHTTPException(status_code=403)
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Drop not found")

    try:
        drop = actions.deactivate_drop(
            db_session=db_session,
            dropper_claim_id=dropper_claim_id,
        )
    except NoResultFound:
        raise EngineHTTPException(status_code=404, detail="Drop not found")
    except Exception as e:
        # Fix: this handler deactivates drops, but the original docstring,
        # log message, and error detail all said "activate" (copy-paste).
        logger.error(f"Can't deactivate drop: {e}")
        raise EngineHTTPException(status_code=500, detail="Can't deactivate drop")

    return data.DropUpdatedResponse(
        dropper_claim_id=drop.id,
        dropper_contract_id=drop.dropper_contract_id,
        title=drop.title,
        description=drop.description,
        claim_block_deadline=drop.claim_block_deadline,
        terminus_address=drop.terminus_address,
        terminus_pool_id=drop.terminus_pool_id,
        claim_id=drop.claim_id,
        active=drop.active,
    )
|
||||
|
||||
|
||||
@app.put("/claims/{dropper_claim_id}", response_model=data.DropUpdatedResponse)
|
||||
async def update_drop(
|
||||
request: Request,
|
||||
dropper_claim_id: UUID,
|
||||
update_request: data.DropUpdateRequest = Body(...),
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.DropUpdatedResponse:
|
||||
|
||||
"""
|
||||
Update a given drop by drop id.
|
||||
"""
|
||||
try:
|
||||
actions.ensure_admin_token_holder(
|
||||
db_session, dropper_claim_id, request.state.address
|
||||
)
|
||||
except actions.AuthorizationError as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=403)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="Drop not found")
|
||||
|
||||
try:
|
||||
drop = actions.update_drop(
|
||||
db_session=db_session,
|
||||
dropper_claim_id=dropper_claim_id,
|
||||
title=update_request.title,
|
||||
description=update_request.description,
|
||||
claim_block_deadline=update_request.claim_block_deadline,
|
||||
terminus_address=update_request.terminus_address,
|
||||
terminus_pool_id=update_request.terminus_pool_id,
|
||||
claim_id=update_request.claim_id,
|
||||
address=request.state.address,
|
||||
)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="Drop not found")
|
||||
except Exception as e:
|
||||
logger.error(f"Can't update drop: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Can't update drop")
|
||||
|
||||
return data.DropUpdatedResponse(
|
||||
dropper_claim_id=drop.id,
|
||||
dropper_contract_id=drop.dropper_contract_id,
|
||||
title=drop.title,
|
||||
description=drop.description,
|
||||
claim_block_deadline=drop.claim_block_deadline,
|
||||
terminus_address=drop.terminus_address,
|
||||
terminus_pool_id=drop.terminus_pool_id,
|
||||
claim_id=drop.claim_id,
|
||||
active=drop.active,
|
||||
)
|
||||
|
||||
|
||||
@app.get("/claimants", response_model=data.DropListResponse)
|
||||
async def get_claimants(
|
||||
request: Request,
|
||||
dropper_claim_id: UUID,
|
||||
limit: int = 10,
|
||||
offset: int = 0,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.DropListResponse:
|
||||
"""
|
||||
Get list of claimants for a given dropper contract.
|
||||
"""
|
||||
|
||||
try:
|
||||
actions.ensure_admin_token_holder(
|
||||
db_session, dropper_claim_id, request.state.address
|
||||
)
|
||||
except actions.AuthorizationError as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=403)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
try:
|
||||
results = actions.get_claimants(
|
||||
db_session=db_session,
|
||||
dropper_claim_id=dropper_claim_id,
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.info(f"Can't add claimants for claim {dropper_claim_id} with error: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail=f"Error adding claimants")
|
||||
|
||||
return data.DropListResponse(drops=list(results))
|
||||
|
||||
|
||||
@app.post("/claimants", response_model=data.ClaimantsResponse)
|
||||
async def add_claimants(
|
||||
request: Request,
|
||||
add_claimants_request: data.DropAddClaimantsRequest = Body(...),
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.ClaimantsResponse:
|
||||
"""
|
||||
Add addresses to particular claim
|
||||
"""
|
||||
|
||||
try:
|
||||
actions.ensure_admin_token_holder(
|
||||
db_session, add_claimants_request.dropper_claim_id, request.state.address
|
||||
)
|
||||
except actions.AuthorizationError as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=403)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
try:
|
||||
results = actions.add_claimants(
|
||||
db_session=db_session,
|
||||
dropper_claim_id=add_claimants_request.dropper_claim_id,
|
||||
claimants=add_claimants_request.claimants,
|
||||
added_by=request.state.address,
|
||||
)
|
||||
|
||||
except actions.DublicateClaimantError:
|
||||
raise EngineHTTPException(
|
||||
status_code=400,
|
||||
detail="Dublicated claimants in request please deduplicate them.",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.info(
|
||||
f"Can't add claimants for claim {add_claimants_request.dropper_claim_id} with error: {e}"
|
||||
)
|
||||
raise EngineHTTPException(status_code=500, detail=f"Error adding claimants")
|
||||
|
||||
return data.ClaimantsResponse(claimants=results)
|
||||
|
||||
|
||||
@app.delete("/claimants", response_model=data.RemoveClaimantsResponse)
|
||||
async def delete_claimants(
|
||||
request: Request,
|
||||
remove_claimants_request: data.DropRemoveClaimantsRequest = Body(...),
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.RemoveClaimantsResponse:
|
||||
|
||||
"""
|
||||
Remove addresses to particular claim
|
||||
"""
|
||||
|
||||
try:
|
||||
actions.ensure_admin_token_holder(
|
||||
db_session,
|
||||
remove_claimants_request.dropper_claim_id,
|
||||
request.state.address,
|
||||
)
|
||||
except actions.AuthorizationError as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=403)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
try:
|
||||
results = actions.delete_claimants(
|
||||
db_session=db_session,
|
||||
dropper_claim_id=remove_claimants_request.dropper_claim_id,
|
||||
addresses=remove_claimants_request.addresses,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.info(
|
||||
f"Can't remove claimants for claim {remove_claimants_request.dropper_claim_id} with error: {e}"
|
||||
)
|
||||
raise EngineHTTPException(status_code=500, detail=f"Error removing claimants")
|
||||
|
||||
return data.RemoveClaimantsResponse(addresses=results)
|
||||
|
||||
|
||||
@app.get("/claimants/search", response_model=data.Claimant)
|
||||
async def get_claimant_in_drop(
|
||||
request: Request,
|
||||
dropper_claim_id: UUID,
|
||||
address: str,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.Claimant:
|
||||
|
||||
"""
|
||||
Return claimant from drop
|
||||
"""
|
||||
try:
|
||||
actions.ensure_admin_token_holder(
|
||||
db_session,
|
||||
dropper_claim_id,
|
||||
request.state.address,
|
||||
)
|
||||
except actions.AuthorizationError as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=403)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
try:
|
||||
claimant = actions.get_claimant(
|
||||
db_session=db_session,
|
||||
dropper_claim_id=dropper_claim_id,
|
||||
address=address,
|
||||
)
|
||||
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(
|
||||
status_code=404, detail="Address not present in that drop."
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Can't get claimant: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get claimant")
|
||||
|
||||
return data.Claimant(address=claimant.address, amount=claimant.amount)
|
||||
|
||||
|
||||
@app.post("/drop/{dropper_claim_id}/refetch")
|
||||
async def refetch_drop_signatures(
|
||||
request: Request,
|
||||
dropper_claim_id: UUID,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> Any:
|
||||
"""
|
||||
Refetch signatures for a drop
|
||||
"""
|
||||
|
||||
try:
|
||||
actions.ensure_admin_token_holder(
|
||||
db_session, dropper_claim_id, request.state.address
|
||||
)
|
||||
except actions.AuthorizationError as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=403)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
try:
|
||||
signatures = actions.refetch_drop_signatures(
|
||||
db_session=db_session, dropper_claim_id=dropper_claim_id
|
||||
)
|
||||
except Exception as e:
|
||||
logger.info(
|
||||
f"Can't refetch signatures for drop {dropper_claim_id} with error: {e}"
|
||||
)
|
||||
raise EngineHTTPException(
|
||||
status_code=500, detail=f"Error refetching signatures"
|
||||
)
|
||||
|
||||
return signatures
|
|
@ -0,0 +1,336 @@
|
|||
"""
|
||||
Leaderboard API.
|
||||
"""
|
||||
import logging
|
||||
from uuid import UUID
|
||||
|
||||
from web3 import Web3
|
||||
from fastapi import FastAPI, Request, Depends, Response
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
from typing import List, Optional
|
||||
|
||||
from .. import actions
|
||||
from .. import data
|
||||
from .. import db
|
||||
from ..middleware import ExtractBearerTokenMiddleware, EngineHTTPException
|
||||
from ..settings import DOCS_TARGET_PATH, bugout_client as bc
|
||||
from ..version import VERSION
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# OpenAPI tag metadata for the generated docs.
tags_metadata = [
    {"name": "leaderboard", "description": "Moonstream Engine leaderboard API"}
]


# Read-only endpoints that skip bearer-token extraction.
# NOTE(review): "leaderboad" is a typo for "leaderboard"; renaming requires
# checking for external importers of this name — TODO confirm and rename.
leaderboad_whitelist = {
    "/leaderboard/quartiles": "GET",
    "/leaderboard/count/addresses": "GET",
    "/leaderboard/position": "GET",
    "/leaderboard": "GET",
    "/leaderboard/rank": "GET",
    "/leaderboard/ranks": "GET",
}

app = FastAPI(
    title=f"Moonstream Engine leaderboard API",
    description="Moonstream Engine leaderboard API endpoints.",
    version=VERSION,
    openapi_tags=tags_metadata,
    openapi_url="/openapi.json",
    docs_url=None,
    redoc_url=f"/{DOCS_TARGET_PATH}",
)


# Extract the bearer token for all routes except the whitelisted GETs above.
app.add_middleware(ExtractBearerTokenMiddleware, whitelist=leaderboad_whitelist)

# Public read API: allow any origin, no credentials.
app.add_middleware(
    CORSMiddleware,
    allow_origins="*",
    allow_credentials=False,
    allow_methods=["*"],
    allow_headers=["*"],
)
|
||||
|
||||
|
||||
@app.get("/count/addresses")
|
||||
async def count_addresses(
|
||||
leaderboard_id: UUID,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
):
|
||||
|
||||
"""
|
||||
Returns the number of addresses in the leaderboard.
|
||||
"""
|
||||
|
||||
### Check if leaderboard exists
|
||||
try:
|
||||
actions.get_leaderboard_by_id(db_session, leaderboard_id)
|
||||
except NoResultFound as e:
|
||||
raise EngineHTTPException(
|
||||
status_code=404,
|
||||
detail="Leaderboard not found.",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error while getting leaderboard: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
count = actions.get_leaderboard_total_count(db_session, leaderboard_id)
|
||||
|
||||
return data.CountAddressesResponse(count=count)
|
||||
|
||||
|
||||
@app.get("/quartiles")
|
||||
async def quartiles(
|
||||
leaderboard_id: UUID,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
):
|
||||
|
||||
"""
|
||||
Returns the quartiles of the leaderboard.
|
||||
"""
|
||||
### Check if leaderboard exists
|
||||
try:
|
||||
actions.get_leaderboard_by_id(db_session, leaderboard_id)
|
||||
except NoResultFound as e:
|
||||
raise EngineHTTPException(
|
||||
status_code=404,
|
||||
detail="Leaderboard not found.",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error while getting leaderboard: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
try:
|
||||
q1, q2, q3 = actions.get_qurtiles(db_session, leaderboard_id)
|
||||
|
||||
except actions.LeaderboardIsEmpty:
|
||||
return Response(status_code=204)
|
||||
except Exception as e:
|
||||
logger.error(f"Error while getting quartiles: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
return data.QuartilesResponse(
|
||||
percentile_25={"address": q1[0], "score": q1[1], "rank": q1[2]},
|
||||
percentile_50={"address": q2[0], "score": q2[1], "rank": q2[2]},
|
||||
percentile_75={"address": q3[0], "score": q3[1], "rank": q3[2]},
|
||||
)
|
||||
|
||||
|
||||
@app.get("/position")
|
||||
async def position(
|
||||
leaderboard_id: UUID,
|
||||
address: str,
|
||||
window_size: int = 1,
|
||||
limit: int = 10,
|
||||
offset: int = 0,
|
||||
normalize_addresses: bool = True,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
):
|
||||
|
||||
"""
|
||||
Returns the leaderboard posotion for the given address.
|
||||
With given window size.
|
||||
"""
|
||||
|
||||
### Check if leaderboard exists
|
||||
try:
|
||||
actions.get_leaderboard_by_id(db_session, leaderboard_id)
|
||||
except NoResultFound as e:
|
||||
raise EngineHTTPException(
|
||||
status_code=404,
|
||||
detail="Leaderboard not found.",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error while getting leaderboard: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
if normalize_addresses:
|
||||
address = Web3.toChecksumAddress(address)
|
||||
|
||||
positions = actions.get_position(
|
||||
db_session, leaderboard_id, address, window_size, limit, offset
|
||||
)
|
||||
|
||||
return positions
|
||||
|
||||
|
||||
@app.get("")
|
||||
@app.get("/")
|
||||
async def leaderboard(
|
||||
leaderboard_id: UUID,
|
||||
limit: int = 10,
|
||||
offset: int = 0,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> List[data.LeaderboardPosition]:
|
||||
|
||||
"""
|
||||
Returns the leaderboard positions.
|
||||
"""
|
||||
|
||||
### Check if leaderboard exists
|
||||
try:
|
||||
actions.get_leaderboard_by_id(db_session, leaderboard_id)
|
||||
except NoResultFound as e:
|
||||
raise EngineHTTPException(
|
||||
status_code=404,
|
||||
detail="Leaderboard not found.",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error while getting leaderboard: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
leaderboard_positions = actions.get_leaderboard_positions(
|
||||
db_session, leaderboard_id, limit, offset
|
||||
)
|
||||
result = [
|
||||
data.LeaderboardPosition(
|
||||
address=position.address,
|
||||
score=position.score,
|
||||
rank=position.rank,
|
||||
points_data=position.points_data,
|
||||
)
|
||||
for position in leaderboard_positions
|
||||
]
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@app.get("/rank")
|
||||
async def rank(
|
||||
leaderboard_id: UUID,
|
||||
rank: int = 1,
|
||||
limit: Optional[int] = None,
|
||||
offset: Optional[int] = None,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> List[data.LeaderboardPosition]:
|
||||
|
||||
"""
|
||||
Returns the leaderboard scores for the given rank.
|
||||
"""
|
||||
|
||||
### Check if leaderboard exists
|
||||
try:
|
||||
actions.get_leaderboard_by_id(db_session, leaderboard_id)
|
||||
except NoResultFound as e:
|
||||
raise EngineHTTPException(
|
||||
status_code=404,
|
||||
detail="Leaderboard not found.",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error while getting leaderboard: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
leaderboard_rank = actions.get_rank(
|
||||
db_session, leaderboard_id, rank, limit=limit, offset=offset
|
||||
)
|
||||
results = [
|
||||
data.LeaderboardPosition(
|
||||
address=rank_position.address,
|
||||
score=rank_position.score,
|
||||
rank=rank_position.rank,
|
||||
points_data=rank_position.points_data,
|
||||
)
|
||||
for rank_position in leaderboard_rank
|
||||
]
|
||||
return results
|
||||
|
||||
|
||||
@app.get("/ranks")
|
||||
async def ranks(
|
||||
leaderboard_id: UUID, db_session: Session = Depends(db.yield_db_session)
|
||||
) -> List[data.RanksResponse]:
|
||||
|
||||
"""
|
||||
Returns the leaderboard rank buckets overview with score and size of bucket.
|
||||
"""
|
||||
|
||||
### Check if leaderboard exists
|
||||
try:
|
||||
actions.get_leaderboard_by_id(db_session, leaderboard_id)
|
||||
except NoResultFound as e:
|
||||
raise EngineHTTPException(
|
||||
status_code=404,
|
||||
detail="Leaderboard not found.",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error while getting leaderboard: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
ranks = actions.get_ranks(db_session, leaderboard_id)
|
||||
results = [
|
||||
data.RanksResponse(
|
||||
score=rank.score,
|
||||
rank=rank.rank,
|
||||
size=rank.size,
|
||||
)
|
||||
for rank in ranks
|
||||
]
|
||||
return results
|
||||
|
||||
|
||||
@app.put("/{leaderboard_id}/scores")
|
||||
async def leaderboard(
|
||||
request: Request,
|
||||
leaderboard_id: UUID,
|
||||
scores: List[data.Score],
|
||||
overwrite: bool = False,
|
||||
normalize_addresses: bool = True,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
):
|
||||
|
||||
"""
|
||||
Put the leaderboard to the database.
|
||||
"""
|
||||
|
||||
access = actions.check_leaderboard_resource_permissions(
|
||||
db_session=db_session,
|
||||
leaderboard_id=leaderboard_id,
|
||||
token=request.state.token,
|
||||
)
|
||||
|
||||
if not access:
|
||||
raise EngineHTTPException(
|
||||
status_code=403, detail="You don't have access to this leaderboard."
|
||||
)
|
||||
|
||||
### Check if leaderboard exists
|
||||
try:
|
||||
actions.get_leaderboard_by_id(db_session, leaderboard_id)
|
||||
except NoResultFound as e:
|
||||
raise EngineHTTPException(
|
||||
status_code=404,
|
||||
detail="Leaderboard not found.",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error while getting leaderboard: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Internal server error")
|
||||
|
||||
try:
|
||||
leaderboard_points = actions.add_scores(
|
||||
db_session=db_session,
|
||||
leaderboard_id=leaderboard_id,
|
||||
scores=scores,
|
||||
overwrite=overwrite,
|
||||
normalize_addresses=normalize_addresses,
|
||||
)
|
||||
except actions.DuplicateLeaderboardAddressError as e:
|
||||
raise EngineHTTPException(
|
||||
status_code=409,
|
||||
detail=f"Duplicates in push to database is disallowed.\n List of duplicates:{e.duplicates}.\n Please handle duplicates manualy.",
|
||||
)
|
||||
except actions.LeaderboardDeleteScoresError as e:
|
||||
logger.error(f"Delete scores failed with error: {e}")
|
||||
raise EngineHTTPException(
|
||||
status_code=500,
|
||||
detail=f"Delete scores failed.",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Score update failed with error: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Score update failed.")
|
||||
|
||||
return leaderboard_points
|
|
@ -0,0 +1,337 @@
|
|||
"""
|
||||
Contract registration API
|
||||
|
||||
Moonstream users can register contracts on Moonstream Engine. This allows them to use these contracts
|
||||
as part of their chain-adjacent activities (like performing signature-based token distributions on the
|
||||
Dropper contract).
|
||||
"""
|
||||
import logging
|
||||
from typing import Dict, List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import Body, Depends, FastAPI, Query, Request, Path
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from sqlalchemy.exc import NoResultFound
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from .. import contracts_actions, data, db
|
||||
from ..middleware import BroodAuthMiddleware, EngineHTTPException
|
||||
from ..settings import DOCS_TARGET_PATH, ORIGINS
|
||||
from ..version import VERSION
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
TITLE = "Moonstream Engine Contracts API"
|
||||
DESCRIPTION = "Users can register contracts on the Moonstream Engine for use in chain-adjacent activities, like setting up signature-based token distributions."
|
||||
|
||||
|
||||
tags_metadata = [
|
||||
{
|
||||
"name": "contracts",
|
||||
"description": DESCRIPTION,
|
||||
},
|
||||
{"name": "requests", "description": "Call requests for registered contracts."},
|
||||
]
|
||||
|
||||
|
||||
whitelist_paths = {
|
||||
"/metatx/openapi.json": "GET",
|
||||
f"/metatx/{DOCS_TARGET_PATH}": "GET",
|
||||
"/metatx/contracts/types": "GET",
|
||||
"/metatx/requests": "GET",
|
||||
}
|
||||
|
||||
app = FastAPI(
|
||||
title=TITLE,
|
||||
description=DESCRIPTION,
|
||||
version=VERSION,
|
||||
openapi_tags=tags_metadata,
|
||||
openapi_url="/openapi.json",
|
||||
docs_url=None,
|
||||
redoc_url=f"/{DOCS_TARGET_PATH}",
|
||||
)
|
||||
|
||||
|
||||
app.add_middleware(BroodAuthMiddleware, whitelist=whitelist_paths)
|
||||
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=ORIGINS,
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
|
||||
@app.get("/contracts/types", tags=["contracts"])
|
||||
async def contract_types() -> Dict[str, str]:
|
||||
"""
|
||||
Describes the contract_types that users can register contracts as against this API.
|
||||
"""
|
||||
return {
|
||||
data.ContractType.raw.value: "A generic smart contract. You can ask users to submit arbitrary calldata to this contract.",
|
||||
data.ContractType.dropper.value: "A Dropper contract. You can authorize users to submit claims against this contract.",
|
||||
}
|
||||
|
||||
|
||||
@app.get("/contracts", tags=["contracts"], response_model=List[data.RegisteredContract])
|
||||
async def list_registered_contracts(
|
||||
request: Request,
|
||||
blockchain: Optional[str] = Query(None),
|
||||
address: Optional[str] = Query(None),
|
||||
contract_type: Optional[data.ContractType] = Query(None),
|
||||
limit: int = Query(10),
|
||||
offset: Optional[int] = Query(None),
|
||||
db_session: Session = Depends(db.yield_db_read_only_session),
|
||||
) -> List[data.RegisteredContract]:
|
||||
"""
|
||||
Users can use this endpoint to look up the contracts they have registered against this API.
|
||||
"""
|
||||
try:
|
||||
contracts = contracts_actions.lookup_registered_contracts(
|
||||
db_session=db_session,
|
||||
moonstream_user_id=request.state.user.id,
|
||||
blockchain=blockchain,
|
||||
address=address,
|
||||
contract_type=contract_type,
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
)
|
||||
except Exception as err:
|
||||
logger.error(repr(err))
|
||||
raise EngineHTTPException(status_code=500)
|
||||
return [contract for contract in contracts]
|
||||
|
||||
|
||||
@app.get(
    "/contracts/{contract_id}",
    tags=["contracts"],
    response_model=data.RegisteredContract,
)
async def get_registered_contract(
    request: Request,
    contract_id: UUID = Path(...),
    db_session: Session = Depends(db.yield_db_read_only_session),
) -> data.RegisteredContract:
    """
    Get the contract by ID.

    Returns 404 when the contract does not exist or is not owned by the
    authenticated user (deliberately indistinguishable, to avoid leaking
    which contract IDs exist).
    """
    try:
        contract = contracts_actions.get_registered_contract(
            db_session=db_session,
            moonstream_user_id=request.state.user.id,
            contract_id=contract_id,
        )
    except NoResultFound:
        # Fixed grammar in the user-facing message ("is not contract" -> "is no contract").
        raise EngineHTTPException(
            status_code=404,
            detail="Either there is no contract with that ID or you do not have access to that contract.",
        )
    except Exception as err:
        logger.error(repr(err))
        raise EngineHTTPException(status_code=500)
    # Return annotation fixed: this endpoint returns a single contract, not a
    # list (the response_model already said so).
    return contract
|
||||
|
||||
|
||||
@app.post("/contracts", tags=["contracts"], response_model=data.RegisteredContract)
|
||||
async def register_contract(
|
||||
request: Request,
|
||||
contract: data.RegisterContractRequest = Body(...),
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.RegisteredContract:
|
||||
"""
|
||||
Allows users to register contracts.
|
||||
"""
|
||||
try:
|
||||
registered_contract = contracts_actions.register_contract(
|
||||
db_session=db_session,
|
||||
moonstream_user_id=request.state.user.id,
|
||||
blockchain=contract.blockchain,
|
||||
address=contract.address,
|
||||
contract_type=contract.contract_type,
|
||||
title=contract.title,
|
||||
description=contract.description,
|
||||
image_uri=contract.image_uri,
|
||||
)
|
||||
except contracts_actions.ContractAlreadyRegistered:
|
||||
raise EngineHTTPException(
|
||||
status_code=409,
|
||||
detail="Contract already registered",
|
||||
)
|
||||
return registered_contract
|
||||
|
||||
|
||||
@app.put(
    "/contracts/{contract_id}",
    tags=["contracts"],
    response_model=data.RegisteredContract,
)
async def update_contract(
    request: Request,
    contract_id: UUID = Path(...),
    update_info: data.UpdateContractRequest = Body(...),
    db_session: Session = Depends(db.yield_db_session),
) -> data.RegisteredContract:
    """
    Update the metadata (title, description, image URI) of a registered contract.

    Returns 404 when the contract does not exist or is not owned by the
    authenticated user; unexpected failures are logged and mapped to 500.
    """
    try:
        # Positional arguments: (db_session, moonstream_user_id, contract_id,
        # title, description, image_uri, ignore_nulls) — presumably matching
        # contracts_actions.update_registered_contract's signature; verify there.
        contract = contracts_actions.update_registered_contract(
            db_session,
            request.state.user.id,
            contract_id,
            update_info.title,
            update_info.description,
            update_info.image_uri,
            update_info.ignore_nulls,
        )
    except NoResultFound:
        raise EngineHTTPException(
            status_code=404,
            detail="Either there is not contract with that ID or you do not have access to that contract.",
        )
    except Exception as err:
        logger.error(repr(err))
        raise EngineHTTPException(status_code=500)

    return contract
|
||||
|
||||
|
||||
@app.delete(
    "/contracts/{contract_id}",
    tags=["contracts"],
    response_model=data.RegisteredContract,
)
async def delete_contract(
    request: Request,
    contract_id: UUID,
    db_session: Session = Depends(db.yield_db_session),
) -> data.RegisteredContract:
    """
    Allows users to delete contracts that they have registered.
    """
    try:
        # Delegate deletion to the actions layer, scoped to the caller's user ID.
        result = contracts_actions.delete_registered_contract(
            db_session=db_session,
            moonstream_user_id=request.state.user.id,
            registered_contract_id=contract_id,
        )
        return result
    except Exception as err:
        logger.error(repr(err))
        raise EngineHTTPException(status_code=500)
|
||||
|
||||
|
||||
@app.get("/requests", tags=["requests"], response_model=List[data.CallRequest])
|
||||
async def list_requests(
|
||||
contract_id: Optional[UUID] = Query(None),
|
||||
contract_address: Optional[str] = Query(None),
|
||||
caller: str = Query(...),
|
||||
limit: int = Query(100),
|
||||
offset: Optional[int] = Query(None),
|
||||
show_expired: Optional[bool] = Query(False),
|
||||
db_session: Session = Depends(db.yield_db_read_only_session),
|
||||
) -> List[data.CallRequest]:
|
||||
"""
|
||||
Allows API user to see all unexpired call requests for a given caller against a given contract.
|
||||
|
||||
At least one of `contract_id` or `contract_address` must be provided as query parameters.
|
||||
"""
|
||||
try:
|
||||
requests = contracts_actions.list_call_requests(
|
||||
db_session=db_session,
|
||||
contract_id=contract_id,
|
||||
contract_address=contract_address,
|
||||
caller=caller,
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
show_expired=show_expired,
|
||||
)
|
||||
except ValueError as e:
|
||||
logger.error(repr(e))
|
||||
raise EngineHTTPException(status_code=400, detail=str(e))
|
||||
except Exception as e:
|
||||
logger.error(repr(e))
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
return requests
|
||||
|
||||
|
||||
@app.get("/requests/{request_id}", tags=["requests"], response_model=data.CallRequest)
|
||||
async def get_request(
|
||||
request_id: UUID = Path(...),
|
||||
db_session: Session = Depends(db.yield_db_read_only_session),
|
||||
) -> List[data.CallRequest]:
|
||||
"""
|
||||
Allows API user to see call request.
|
||||
|
||||
At least one of `contract_id` or `contract_address` must be provided as query parameters.
|
||||
"""
|
||||
try:
|
||||
result = contracts_actions.get_call_requests(
|
||||
db_session=db_session,
|
||||
request_id=request_id,
|
||||
)
|
||||
except contracts_actions.CallRequestNotFound:
|
||||
raise EngineHTTPException(
|
||||
status_code=404,
|
||||
detail="There is no call request with that ID.",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(repr(e))
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
@app.post("/requests", tags=["requests"], response_model=int)
|
||||
async def create_requests(
|
||||
request: Request,
|
||||
data: data.CreateCallRequestsAPIRequest = Body(...),
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> int:
|
||||
"""
|
||||
Allows API user to register call requests from given contract details, TTL, and call specifications.
|
||||
|
||||
At least one of `contract_id` or `contract_address` must be provided in the request body.
|
||||
"""
|
||||
try:
|
||||
num_requests = contracts_actions.request_calls(
|
||||
db_session=db_session,
|
||||
moonstream_user_id=request.state.user.id,
|
||||
registered_contract_id=data.contract_id,
|
||||
contract_address=data.contract_address,
|
||||
call_specs=data.specifications,
|
||||
ttl_days=data.ttl_days,
|
||||
)
|
||||
except contracts_actions.InvalidAddressFormat as err:
|
||||
raise EngineHTTPException(
|
||||
status_code=400,
|
||||
detail=f"Address not passed web3checksum validation, err: {err}",
|
||||
)
|
||||
except Exception as err:
|
||||
logger.error(repr(err))
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
return num_requests
|
||||
|
||||
|
||||
@app.delete("/requests", tags=["requests"], response_model=int)
|
||||
async def delete_requests(
|
||||
request: Request,
|
||||
request_ids: List[UUID] = Body(...),
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> int:
|
||||
"""
|
||||
Allows users to delete requests.
|
||||
"""
|
||||
try:
|
||||
deleted_requests = contracts_actions.delete_requests(
|
||||
db_session=db_session,
|
||||
moonstream_user_id=request.state.user.id,
|
||||
request_ids=request_ids,
|
||||
)
|
||||
except Exception as err:
|
||||
logger.error(repr(err))
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
return deleted_requests
|
|
@ -0,0 +1,421 @@
|
|||
"""
|
||||
Moonstream Engine Play API.
|
||||
"""
|
||||
import logging
|
||||
from typing import List, Optional
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import Request, Depends, Query
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
from hexbytes import HexBytes
|
||||
from fastapi import FastAPI
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from web3 import Web3
|
||||
|
||||
from ..models import DropperClaimant
|
||||
from .. import actions
|
||||
from .. import data
|
||||
from .. import db
|
||||
from .. import signatures
|
||||
from ..contracts import Dropper_interface
|
||||
from ..middleware import EngineHTTPException
|
||||
from ..settings import BLOCKCHAIN_WEB3_PROVIDERS, DOCS_TARGET_PATH
|
||||
from ..version import VERSION
|
||||
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
tags_metadata = [{"name": "Play", "description": "Moonstream Engine Play API"}]
|
||||
|
||||
|
||||
app = FastAPI(
|
||||
title=f"Moonstream Engine Play API",
|
||||
description="Moonstream Engine Play API endpoints.",
|
||||
version=VERSION,
|
||||
openapi_tags=tags_metadata,
|
||||
openapi_url="/openapi.json",
|
||||
docs_url=None,
|
||||
redoc_url=f"/{DOCS_TARGET_PATH}",
|
||||
)
|
||||
|
||||
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins="*",
|
||||
allow_credentials=False,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
|
||||
@app.get("/blockchains")
|
||||
async def get_drops_blockchains_handler(
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> List[data.DropperBlockchainResponse]:
|
||||
"""
|
||||
Get list of blockchains.
|
||||
"""
|
||||
|
||||
try:
|
||||
results = actions.list_drops_blockchains(db_session=db_session)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="No drops found.")
|
||||
except Exception as e:
|
||||
logger.error(f"Can't get list of drops end with error: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get drops")
|
||||
|
||||
response = [
|
||||
data.DropperBlockchainResponse(
|
||||
blockchain=result.blockchain,
|
||||
)
|
||||
for result in results
|
||||
]
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@app.get("/claims/batch", response_model=List[data.DropBatchResponseItem])
|
||||
async def get_drop_batch_handler(
|
||||
blockchain: str,
|
||||
address: str,
|
||||
limit: int = 10,
|
||||
offset: int = 0,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> List[data.DropBatchResponseItem]:
|
||||
"""
|
||||
Get signed transaction for all user drops.
|
||||
"""
|
||||
|
||||
address = Web3.toChecksumAddress(address)
|
||||
|
||||
try:
|
||||
claimant_drops = actions.get_claimant_drops(
|
||||
db_session, blockchain, address, limit, offset
|
||||
)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(
|
||||
status_code=403, detail="You are not authorized to claim that reward"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get claimant")
|
||||
|
||||
# get claimants
|
||||
try:
|
||||
claimants = (
|
||||
db_session.query(DropperClaimant)
|
||||
.filter(
|
||||
DropperClaimant.id.in_(
|
||||
[item.dropper_claimant_id for item in claimant_drops]
|
||||
)
|
||||
)
|
||||
.all()
|
||||
)
|
||||
except Exception as err:
|
||||
logger.error(f"Can't get claimant objects for address: {address}")
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get claimant objects.")
|
||||
|
||||
claimants_dict = {item.id: item for item in claimants}
|
||||
|
||||
# generate list of claims
|
||||
|
||||
claims: List[data.DropBatchResponseItem] = []
|
||||
|
||||
commit_required = False
|
||||
|
||||
for claimant_drop in claimant_drops:
|
||||
|
||||
transformed_amount = claimant_drop.raw_amount
|
||||
|
||||
if transformed_amount is None:
|
||||
|
||||
transformed_amount = actions.transform_claim_amount(
|
||||
db_session, claimant_drop.dropper_claim_id, claimant_drop.amount
|
||||
)
|
||||
|
||||
signature = claimant_drop.signature
|
||||
if signature is None or not claimant_drop.is_recent_signature:
|
||||
dropper_contract = Dropper_interface.Contract(
|
||||
BLOCKCHAIN_WEB3_PROVIDERS[blockchain],
|
||||
claimant_drop.dropper_contract_address,
|
||||
)
|
||||
|
||||
message_hash_raw = dropper_contract.claimMessageHash(
|
||||
claimant_drop.claim_id,
|
||||
claimant_drop.address,
|
||||
claimant_drop.claim_block_deadline,
|
||||
int(transformed_amount),
|
||||
).call()
|
||||
|
||||
message_hash = HexBytes(message_hash_raw).hex()
|
||||
|
||||
try:
|
||||
signature = signatures.DROP_SIGNER.sign_message(message_hash)
|
||||
claimants_dict[claimant_drop.dropper_claimant_id].signature = signature
|
||||
commit_required = True
|
||||
except signatures.AWSDescribeInstancesFail:
|
||||
raise EngineHTTPException(status_code=500)
|
||||
except signatures.SignWithInstanceFail:
|
||||
raise EngineHTTPException(status_code=500)
|
||||
except Exception as err:
|
||||
logger.error(f"Unexpected error in signing message process: {err}")
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
claims.append(
|
||||
data.DropBatchResponseItem(
|
||||
claimant=claimant_drop.address,
|
||||
amount=transformed_amount,
|
||||
amount_string=str(transformed_amount),
|
||||
claim_id=claimant_drop.claim_id,
|
||||
block_deadline=claimant_drop.claim_block_deadline,
|
||||
signature=signature,
|
||||
dropper_claim_id=claimant_drop.dropper_claim_id,
|
||||
dropper_contract_address=claimant_drop.dropper_contract_address,
|
||||
blockchain=claimant_drop.blockchain,
|
||||
active=claimant_drop.active,
|
||||
title=claimant_drop.title,
|
||||
description=claimant_drop.description,
|
||||
)
|
||||
)
|
||||
|
||||
if commit_required:
|
||||
db_session.commit()
|
||||
|
||||
return claims
|
||||
|
||||
|
||||
@app.get("/claims/{dropper_claim_id}", response_model=data.DropResponse)
|
||||
async def get_drop_handler(
|
||||
dropper_claim_id: UUID,
|
||||
address: str,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.DropResponse:
|
||||
"""
|
||||
Get signed transaction for user with the given address for that claim.
|
||||
"""
|
||||
|
||||
address = Web3.toChecksumAddress(address)
|
||||
|
||||
try:
|
||||
claimant = actions.get_claimant(db_session, dropper_claim_id, address)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(
|
||||
status_code=403, detail="You are not authorized to claim that reward"
|
||||
)
|
||||
except Exception as e:
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get claimant")
|
||||
|
||||
try:
|
||||
claimant_db_object = (
|
||||
db_session.query(DropperClaimant)
|
||||
.filter(DropperClaimant.id == claimant.dropper_claimant_id)
|
||||
.one()
|
||||
)
|
||||
except Exception as err:
|
||||
logger.error(
|
||||
f"Can't get claimant object for drop: {dropper_claim_id} and address: {address}"
|
||||
)
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get claimant object.")
|
||||
|
||||
if not claimant.active:
|
||||
raise EngineHTTPException(
|
||||
status_code=403, detail="Cannot claim rewards for an inactive claim"
|
||||
)
|
||||
|
||||
# If block deadline has already been exceeded - the contract (or frontend) will handle it.
|
||||
if claimant.claim_block_deadline is None:
|
||||
raise EngineHTTPException(
|
||||
status_code=403,
|
||||
detail="Cannot claim rewards for a claim with no block deadline",
|
||||
)
|
||||
|
||||
transformed_amount = claimant.raw_amount
|
||||
if transformed_amount is None:
|
||||
transformed_amount = actions.transform_claim_amount(
|
||||
db_session, dropper_claim_id, claimant.amount
|
||||
)
|
||||
|
||||
signature = claimant.signature
|
||||
if signature is None or not claimant.is_recent_signature:
|
||||
dropper_contract = Dropper_interface.Contract(
|
||||
claimant.blockchain, claimant.dropper_contract_address
|
||||
)
|
||||
message_hash_raw = dropper_contract.claimMessageHash(
|
||||
claimant.claim_id,
|
||||
claimant.address,
|
||||
claimant.claim_block_deadline,
|
||||
int(transformed_amount),
|
||||
).call()
|
||||
|
||||
message_hash = HexBytes(message_hash_raw).hex()
|
||||
|
||||
try:
|
||||
signature = signatures.DROP_SIGNER.sign_message(message_hash)
|
||||
claimant_db_object.signature = signature
|
||||
db_session.commit()
|
||||
except signatures.AWSDescribeInstancesFail:
|
||||
raise EngineHTTPException(status_code=500)
|
||||
except signatures.SignWithInstanceFail:
|
||||
raise EngineHTTPException(status_code=500)
|
||||
except Exception as err:
|
||||
logger.error(f"Unexpected error in signing message process: {err}")
|
||||
raise EngineHTTPException(status_code=500)
|
||||
|
||||
return data.DropResponse(
|
||||
claimant=claimant.address,
|
||||
amount=str(transformed_amount),
|
||||
claim_id=claimant.claim_id,
|
||||
block_deadline=claimant.claim_block_deadline,
|
||||
signature=signature,
|
||||
title=claimant.title,
|
||||
description=claimant.description,
|
||||
)
|
||||
|
||||
|
||||
@app.get("/drops", response_model=data.DropListResponse)
|
||||
async def get_drop_list_handler(
|
||||
blockchain: str,
|
||||
claimant_address: str,
|
||||
dropper_contract_address: Optional[str] = Query(None),
|
||||
terminus_address: Optional[str] = Query(None),
|
||||
terminus_pool_id: Optional[int] = Query(None),
|
||||
active: Optional[bool] = Query(None),
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.DropListResponse:
|
||||
"""
|
||||
Get list of drops for a given dropper contract and claimant address.
|
||||
"""
|
||||
|
||||
if dropper_contract_address:
|
||||
dropper_contract_address = Web3.toChecksumAddress(dropper_contract_address)
|
||||
|
||||
if claimant_address:
|
||||
claimant_address = Web3.toChecksumAddress(claimant_address)
|
||||
|
||||
if terminus_address:
|
||||
terminus_address = Web3.toChecksumAddress(terminus_address)
|
||||
|
||||
try:
|
||||
results = actions.get_claims(
|
||||
db_session=db_session,
|
||||
dropper_contract_address=dropper_contract_address,
|
||||
blockchain=blockchain,
|
||||
claimant_address=claimant_address,
|
||||
terminus_address=terminus_address,
|
||||
terminus_pool_id=terminus_pool_id,
|
||||
active=active,
|
||||
limit=limit,
|
||||
offset=offset,
|
||||
)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="No drops found.")
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Can't get claims for user {claimant_address} end with error: {e}"
|
||||
)
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get claims")
|
||||
|
||||
return data.DropListResponse(drops=[result for result in results])
|
||||
|
||||
|
||||
@app.get("/drops/contracts", response_model=List[data.DropperContractResponse])
|
||||
async def get_dropper_contracts_handler(
|
||||
blockchain: Optional[str] = Query(None),
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> List[data.DropperContractResponse]:
|
||||
"""
|
||||
Get list of drops for a given dropper contract.
|
||||
"""
|
||||
|
||||
try:
|
||||
results = actions.list_dropper_contracts(
|
||||
db_session=db_session, blockchain=blockchain
|
||||
)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="No drops found.")
|
||||
except Exception as e:
|
||||
logger.error(f"Can't get list of dropper contracts end with error: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get contracts")
|
||||
|
||||
response = [
|
||||
data.DropperContractResponse(
|
||||
id=result.id,
|
||||
blockchain=result.blockchain,
|
||||
address=result.address,
|
||||
title=result.title,
|
||||
description=result.description,
|
||||
image_uri=result.image_uri,
|
||||
)
|
||||
for result in results
|
||||
]
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@app.get("/drops/{dropper_claim_id}", response_model=data.DropperClaimResponse)
|
||||
async def get_drop_handler(
|
||||
request: Request,
|
||||
dropper_claim_id: str,
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> data.DropperClaimResponse:
|
||||
"""
|
||||
Get drop.
|
||||
"""
|
||||
|
||||
try:
|
||||
drop = actions.get_drop(
|
||||
db_session=db_session, dropper_claim_id=dropper_claim_id
|
||||
)
|
||||
except NoResultFound:
|
||||
raise EngineHTTPException(status_code=404, detail="No drops found.")
|
||||
except Exception as e:
|
||||
logger.error(f"Can't get drop {dropper_claim_id} end with error: {e}")
|
||||
raise EngineHTTPException(status_code=500, detail="Can't get drop")
|
||||
|
||||
return data.DropperClaimResponse(
|
||||
id=drop.id,
|
||||
dropper_contract_id=drop.dropper_contract_id,
|
||||
title=drop.title,
|
||||
description=drop.description,
|
||||
active=drop.active,
|
||||
claim_block_deadline=drop.claim_block_deadline,
|
||||
terminus_address=drop.terminus_address,
|
||||
terminus_pool_id=drop.terminus_pool_id,
|
||||
claim_id=drop.claim_id,
|
||||
)
|
||||
|
||||
|
||||
@app.get("/terminus")
|
||||
async def get_drops_terminus_handler(
|
||||
blockchain: str = Query(None),
|
||||
db_session: Session = Depends(db.yield_db_session),
|
||||
) -> List[data.DropperTerminusResponse]:
|
||||
|
||||
"""
|
||||
Return distinct terminus pools
|
||||
"""
|
||||
|
||||
try:
|
||||
results = actions.list_drops_terminus(
|
||||
db_session=db_session, blockchain=blockchain
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Can't get list of terminus contracts end with error: {e}")
|
||||
raise EngineHTTPException(
|
||||
status_code=500, detail="Can't get terminus contracts"
|
||||
)
|
||||
|
||||
response = [
|
||||
data.DropperTerminusResponse(
|
||||
terminus_address=result.terminus_address,
|
||||
terminus_pool_id=result.terminus_pool_id,
|
||||
blockchain=result.blockchain,
|
||||
)
|
||||
for result in results
|
||||
]
|
||||
|
||||
return response
|
|
@ -0,0 +1,157 @@
|
|||
import argparse
|
||||
import logging
|
||||
from typing import Dict, List, Optional, Any
|
||||
|
||||
from .. import db
|
||||
from ..contracts import Dropper_interface, ERC20_interface
|
||||
from ..settings import BLOCKCHAIN_WEB3_PROVIDERS, UNSUPPORTED_BLOCKCHAIN_ERROR_MESSAGE
|
||||
|
||||
|
||||
def run_fill_raw_amount(args: argparse.Namespace):
    """
    Backfill the dropper_claimants.raw_amount column from the amount column.

    For every registered claim, fetches the claim's token type from the Dropper
    contract on chain. ERC20 claims (token type 20) get the amount suffixed
    with the token's decimals as zeros; other token types copy the amount
    verbatim. Per-claim token info is staged in a scratch table `temptest`,
    which is then joined by the final UPDATE statement.
    """
    # Cache of claim token types:
    # blockchain -> contract address -> list of on-chain claim ids.
    token_types: Dict[str, Dict[str, List[Dict[str, Any]]]] = dict()

    with db.yield_db_session_ctx() as db_session:
        res = db_session.execute(
            """select distinct dropper_contracts.blockchain, dropper_contracts.address, dropper_claims.claim_id from dropper_contracts
            left join dropper_claims on dropper_contracts.id = dropper_claims.dropper_contract_id
            where dropper_claims.claim_id is not null"""
        )
        results = res.fetchall()

        # Group claim ids by blockchain and contract address.
        for blockchain, address, claim_id in results:
            if blockchain not in token_types:
                token_types[blockchain] = dict()
            if address not in token_types[blockchain]:
                token_types[blockchain][address] = list()
            token_types[blockchain][address].append(claim_id)

        # Scratch table consumed by the UPDATE below.
        db_session.execute(
            """
            create table temptest
            (
                blockchain varchar,
                address varchar,
                claim_id varchar,
                token_type varchar,
                zeros varchar
            )

            """
        )

        for blockchain in token_types:
            if blockchain not in BLOCKCHAIN_WEB3_PROVIDERS:
                # Fixed: logging.warn is deprecated; use logging.warning.
                logging.warning(
                    f"Blockchain: {blockchain}. {UNSUPPORTED_BLOCKCHAIN_ERROR_MESSAGE}"
                )
                continue
            for address in token_types[blockchain]:
                dropper_contract = Dropper_interface.Contract(
                    BLOCKCHAIN_WEB3_PROVIDERS[blockchain], address
                )

                for claim_id in token_types[blockchain][address]:
                    claim_info = dropper_contract.getClaim(claim_id).call()
                    zeros = None
                    if claim_info[0] == 20:
                        # ERC20 claim: record the token's decimals as a string
                        # of zeros used to scale the amount.
                        erc20_contract = ERC20_interface.Contract(
                            BLOCKCHAIN_WEB3_PROVIDERS[blockchain], claim_info[1]
                        )
                        zeros = "0" * erc20_contract.decimals()

                    db_session.execute(
                        """
                        insert into temptest
                        (
                            blockchain,
                            address,
                            claim_id,
                            token_type,
                            zeros

                        )
                        values
                        (
                            :blockchain,
                            :address,
                            :claim_id,
                            :token_type,
                            :zeros
                        )
                        """,
                        {
                            "blockchain": blockchain,
                            "address": address,
                            "claim_id": str(claim_id),
                            "token_type": str(claim_info[0]),
                            "zeros": zeros,
                        },
                    )

        db_session.commit()

        # update raw_amount column
        db_session.execute(
            """
            update
                dropper_claimants
            set
                raw_amount = (
                    CASE
                        WHEN (
                            select
                                DISTINCT temptest.token_type
                            from
                                temptest
                                inner join dropper_claims ON temptest.claim_id :: int = dropper_claims.claim_id
                            where
                                dropper_claims.id = dropper_claimants.dropper_claim_id
                        ) :: int = 20 THEN CASE
                            WHEN dropper_claimants.amount is not null
                            and dropper_claimants.amount > 0 THEN CONCAT(
                                CAST(dropper_claimants.amount as varchar),
                                (
                                    select
                                        temptest.zeros
                                    from
                                        temptest
                                        inner join dropper_claims ON temptest.claim_id :: int = dropper_claims.claim_id
                                    where
                                        dropper_claims.id = dropper_claimants.dropper_claim_id
                                )
                            )
                            WHEN true THEN CAST(dropper_claimants.amount as varchar)
                        END
                        WHEN true THEN CAST(dropper_claimants.amount as varchar)
                    END
                );
            """
        )
        db_session.commit()
|
||||
|
||||
|
||||
def main():
    """CLI entry point: dispatches subcommands to their handlers."""
    parser = argparse.ArgumentParser(
        description="dao: The command line interface to Moonstream DAO"
    )
    # With no subcommand, print usage instead of failing.
    parser.set_defaults(func=lambda _: parser.print_help())
    subcommands = parser.add_subparsers()

    fill_parser = subcommands.add_parser(
        "fill_raw_amount", help="Fill raw_amount column"
    )
    fill_parser.set_defaults(func=run_fill_raw_amount)

    parsed_args = parser.parse_args()
    parsed_args.func(parsed_args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -0,0 +1,180 @@
|
|||
import os
|
||||
import warnings
|
||||
|
||||
from web3 import Web3, HTTPProvider
|
||||
from web3.middleware import geth_poa_middleware
|
||||
from bugout.app import Bugout
|
||||
|
||||
# Bugout
|
||||
BUGOUT_BROOD_URL = os.environ.get("BUGOUT_BROOD_URL", "https://auth.bugout.dev")
|
||||
BUGOUT_SPIRE_URL = os.environ.get("BUGOUT_SPIRE_URL", "https://spire.bugout.dev")
|
||||
|
||||
bugout_client = Bugout(brood_api_url=BUGOUT_BROOD_URL, spire_api_url=BUGOUT_SPIRE_URL)
|
||||
|
||||
|
||||
ENGINE_DEV_RAW = os.environ.get("ENGINE_DEV", "")
|
||||
ENGINE_DEV = True if ENGINE_DEV_RAW in {"1", "true", "yes", "t", "y"} else False
|
||||
|
||||
# Authorized origins for CORS
|
||||
RAW_ORIGINS = os.environ.get("ENGINE_CORS_ALLOWED_ORIGINS")
|
||||
if RAW_ORIGINS is None:
|
||||
raise ValueError(
|
||||
"ENGINE_CORS_ALLOWED_ORIGINS environment variable must be set (comma-separated list of CORS allowed origins)"
|
||||
)
|
||||
ORIGINS = RAW_ORIGINS.split(",")
|
||||
|
||||
# Open API documentation path
|
||||
DOCS_TARGET_PATH = os.environ.get("DOCS_TARGET_PATH", "docs")
|
||||
|
||||
|
||||
# If SIGNER_KEYSTORE and SIGNER_PASSWORD are set, then we use the local signer.
|
||||
# Otherwise, we use the AWS signer.
|
||||
SIGNER_KEYSTORE = os.environ.get("SIGNER_KEYSTORE")
|
||||
SIGNER_PASSWORD = os.environ.get("SIGNER_PASSWORD")
|
||||
|
||||
MOONSTREAM_SIGNING_SERVER_IP = os.environ.get("MOONSTREAM_SIGNING_SERVER_IP", None)
|
||||
|
||||
# Settings related to the AWS signer
|
||||
AWS_DEFAULT_REGION = os.environ.get("AWS_DEFAULT_REGION")
|
||||
if AWS_DEFAULT_REGION is None:
|
||||
if not ENGINE_DEV:
|
||||
raise ValueError("AWS_DEFAULT_REGION environment variable must be set")
|
||||
else:
|
||||
warnings.warn(
|
||||
'AWS_DEFAULT_REGION environment variable is not set. Using "us-east-1".'
|
||||
)
|
||||
AWS_DEFAULT_REGION = "us-east-1"
|
||||
|
||||
MOONSTREAM_AWS_SIGNER_LAUNCH_TEMPLATE_ID = os.environ.get(
|
||||
"MOONSTREAM_AWS_SIGNER_LAUNCH_TEMPLATE_ID"
|
||||
)
|
||||
if MOONSTREAM_AWS_SIGNER_LAUNCH_TEMPLATE_ID is None:
|
||||
if not ENGINE_DEV:
|
||||
raise ValueError(
|
||||
"MOONSTREAM_AWS_SIGNER_LAUNCH_TEMPLATE_ID environment variable must be set"
|
||||
)
|
||||
else:
|
||||
warnings.warn(
|
||||
"MOONSTREAM_AWS_SIGNER_LAUNCH_TEMPLATE_ID environment variable is not set."
|
||||
)
|
||||
|
||||
MOONSTREAM_AWS_SIGNER_IMAGE_ID = os.environ.get("MOONSTREAM_AWS_SIGNER_IMAGE_ID")
|
||||
if MOONSTREAM_AWS_SIGNER_IMAGE_ID is None:
|
||||
if not ENGINE_DEV:
|
||||
raise ValueError(
|
||||
"MOONSTREAM_AWS_SIGNER_IMAGE_ID environment variable must be set"
|
||||
)
|
||||
else:
|
||||
warnings.warn("MOONSTREAM_AWS_SIGNER_IMAGE_ID environment is not set.")
|
||||
|
||||
MOONSTREAM_AWS_SIGNER_INSTANCE_PORT = 17181
|
||||
|
||||
# Blockchain configuration
|
||||
|
||||
MOONSTREAM_ETHEREUM_WEB3_PROVIDER_URI = os.environ.get(
|
||||
"MOONSTREAM_ETHEREUM_WEB3_PROVIDER_URI"
|
||||
)
|
||||
MOONSTREAM_MUMBAI_WEB3_PROVIDER_URI = os.environ.get(
|
||||
"MOONSTREAM_MUMBAI_WEB3_PROVIDER_URI"
|
||||
)
|
||||
MOONSTREAM_POLYGON_WEB3_PROVIDER_URI = os.environ.get(
|
||||
"MOONSTREAM_POLYGON_WEB3_PROVIDER_URI"
|
||||
)
|
||||
MOONSTREAM_XDAI_WEB3_PROVIDER_URI = os.environ.get("MOONSTREAM_XDAI_WEB3_PROVIDER_URI")
|
||||
|
||||
# TODO(kompotkot): Leave a comment here explaining templated *_WEB3_PROVIDER_URI when we set
|
||||
# NODEBALANCER_ACCESS_ID
|
||||
ETHEREUM_PROVIDER_URI = MOONSTREAM_ETHEREUM_WEB3_PROVIDER_URI
|
||||
MUMBAI_PROVIDER_URI = MOONSTREAM_MUMBAI_WEB3_PROVIDER_URI
|
||||
POLYGON_PROVIDER_URI = MOONSTREAM_POLYGON_WEB3_PROVIDER_URI
|
||||
XDAI_PROVIDER_URI = MOONSTREAM_XDAI_WEB3_PROVIDER_URI
|
||||
|
||||
NODEBALANCER_ACCESS_ID = os.environ.get("ENGINE_NODEBALANCER_ACCESS_ID")
|
||||
if NODEBALANCER_ACCESS_ID is not None:
|
||||
NODEBALANCER_URI_TEMPLATE = "{}?access_id={}&data_source=blockchain"
|
||||
ETHEREUM_PROVIDER_URI = NODEBALANCER_URI_TEMPLATE.format(
|
||||
MOONSTREAM_ETHEREUM_WEB3_PROVIDER_URI, NODEBALANCER_ACCESS_ID
|
||||
)
|
||||
MUMBAI_PROVIDER_URI = NODEBALANCER_URI_TEMPLATE.format(
|
||||
MOONSTREAM_MUMBAI_WEB3_PROVIDER_URI, NODEBALANCER_ACCESS_ID
|
||||
)
|
||||
POLYGON_PROVIDER_URI = NODEBALANCER_URI_TEMPLATE.format(
|
||||
MOONSTREAM_POLYGON_WEB3_PROVIDER_URI, NODEBALANCER_ACCESS_ID
|
||||
)
|
||||
XDAI_PROVIDER_URI = NODEBALANCER_URI_TEMPLATE.format(
|
||||
MOONSTREAM_XDAI_WEB3_PROVIDER_URI, NODEBALANCER_ACCESS_ID
|
||||
)
|
||||
|
||||
BLOCKCHAIN_PROVIDER_URIS = {
|
||||
"ethereum": ETHEREUM_PROVIDER_URI,
|
||||
"mumbai": MUMBAI_PROVIDER_URI,
|
||||
"polygon": POLYGON_PROVIDER_URI,
|
||||
"xdai": XDAI_PROVIDER_URI,
|
||||
}
|
||||
|
||||
SUPPORTED_BLOCKCHAINS = ", ".join(BLOCKCHAIN_PROVIDER_URIS)
|
||||
UNSUPPORTED_BLOCKCHAIN_ERROR_MESSAGE = f"That blockchain is not supported. The supported blockchains are: {SUPPORTED_BLOCKCHAINS}."
|
||||
|
||||
BLOCKCHAIN_WEB3_PROVIDERS = {
|
||||
blockchain: Web3(HTTPProvider(jsonrpc_uri))
|
||||
for blockchain, jsonrpc_uri in BLOCKCHAIN_PROVIDER_URIS.items()
|
||||
}
|
||||
|
||||
# For Proof-of-Authority chains (e.g. Polygon), inject the geth_poa_middleware into the web3 client:
|
||||
# https://web3py.readthedocs.io/en/stable/middleware.html#geth-style-proof-of-authority
|
||||
# For every chain represented in BLOCKCHAIN_WEB3_PROVIDERS and BLOCKCHAIN_PROVIDER_URIS, if the chain
|
||||
# is a proof-of-authority chain, add it to the POA_CHAINS list, as well.
|
||||
POA_CHAINS = ["mumbai", "polygon"]
|
||||
for chain in POA_CHAINS:
|
||||
BLOCKCHAIN_WEB3_PROVIDERS[chain].middleware_onion.inject(
|
||||
geth_poa_middleware, layer=0
|
||||
)
|
||||
|
||||
# Database
|
||||
ENGINE_DB_URI = os.environ.get("ENGINE_DB_URI")
|
||||
if ENGINE_DB_URI is None:
|
||||
raise ValueError("ENGINE_DB_URI environment variable must be set")
|
||||
|
||||
ENGINE_DB_URI_READ_ONLY = os.environ.get("ENGINE_DB_URI_READ_ONLY")
|
||||
if ENGINE_DB_URI_READ_ONLY is None:
|
||||
raise ValueError("ENGINE_DB_URI_READ_ONLY environment variable must be set")
|
||||
|
||||
# SQLAlchemy connection pool size; 0 when ENGINE_POOL_SIZE is unset.
ENGINE_POOL_SIZE_RAW = os.environ.get("ENGINE_POOL_SIZE")
ENGINE_POOL_SIZE = 0
try:
    if ENGINE_POOL_SIZE_RAW is not None:
        ENGINE_POOL_SIZE = int(ENGINE_POOL_SIZE_RAW)
except ValueError:
    # Narrowed from a bare `except:` (which would also swallow e.g.
    # KeyboardInterrupt) and raise ValueError for consistency with the other
    # configuration parsers in this module. ValueError is a subclass of
    # Exception, so existing callers catching Exception are unaffected.
    raise ValueError(f"Could not parse ENGINE_POOL_SIZE as int: {ENGINE_POOL_SIZE_RAW}")
|
||||
|
||||
# Per-statement DB timeout in milliseconds; defaults to 30 seconds.
ENGINE_DB_STATEMENT_TIMEOUT_MILLIS_RAW = os.environ.get(
    "ENGINE_DB_STATEMENT_TIMEOUT_MILLIS"
)
ENGINE_DB_STATEMENT_TIMEOUT_MILLIS = 30000
try:
    if ENGINE_DB_STATEMENT_TIMEOUT_MILLIS_RAW is not None:
        ENGINE_DB_STATEMENT_TIMEOUT_MILLIS = int(ENGINE_DB_STATEMENT_TIMEOUT_MILLIS_RAW)
except ValueError:
    # Narrowed from a bare `except:`; also fixed the "MILLIOS" typo in the
    # error message.
    raise ValueError(
        f"ENGINE_DB_STATEMENT_TIMEOUT_MILLIS must be an integer: {ENGINE_DB_STATEMENT_TIMEOUT_MILLIS_RAW}"
    )
|
||||
|
||||
# How often (seconds) pooled DB connections are recycled; defaults to 30 minutes.
ENGINE_DB_POOL_RECYCLE_SECONDS_RAW = os.environ.get("ENGINE_DB_POOL_RECYCLE_SECONDS")
ENGINE_DB_POOL_RECYCLE_SECONDS = 1800
try:
    if ENGINE_DB_POOL_RECYCLE_SECONDS_RAW is not None:
        ENGINE_DB_POOL_RECYCLE_SECONDS = int(ENGINE_DB_POOL_RECYCLE_SECONDS_RAW)
except ValueError:
    # Narrowed from a bare `except:` so unrelated exceptions are not swallowed.
    raise ValueError(
        f"ENGINE_DB_POOL_RECYCLE_SECONDS must be an integer: {ENGINE_DB_POOL_RECYCLE_SECONDS_RAW}"
    )
|
||||
|
||||
MOONSTREAM_APPLICATION_ID = os.environ.get("MOONSTREAM_APPLICATION_ID", "")
|
||||
if MOONSTREAM_APPLICATION_ID == "":
|
||||
raise ValueError("MOONSTREAM_APPLICATION_ID environment variable must be set")
|
||||
|
||||
LEADERBOARD_RESOURCE_TYPE = "leaderboard"
|
||||
|
||||
MOONSTREAM_ADMIN_ACCESS_TOKEN = os.environ.get("MOONSTREAM_ADMIN_ACCESS_TOKEN", "")
|
||||
if MOONSTREAM_ADMIN_ACCESS_TOKEN == "":
|
||||
raise ValueError("MOONSTREAM_ADMIN_ACCESS_TOKEN environment variable must be set")
|
|
@ -0,0 +1,326 @@
|
|||
"""
|
||||
Signing and signature verification functionality and interfaces.
|
||||
"""
|
||||
import abc
|
||||
import logging
|
||||
import json
|
||||
from typing import Any, List, Optional, Union
|
||||
|
||||
import boto3
|
||||
from web3 import Web3
|
||||
|
||||
from eth_account import Account
|
||||
from eth_account.messages import encode_defunct
|
||||
from eth_account._utils.signing import sign_message_hash
|
||||
import eth_keys
|
||||
import requests
|
||||
from hexbytes import HexBytes
|
||||
|
||||
from .settings import (
|
||||
SIGNER_KEYSTORE,
|
||||
SIGNER_PASSWORD,
|
||||
MOONSTREAM_SIGNING_SERVER_IP,
|
||||
AWS_DEFAULT_REGION,
|
||||
MOONSTREAM_AWS_SIGNER_LAUNCH_TEMPLATE_ID,
|
||||
MOONSTREAM_AWS_SIGNER_IMAGE_ID,
|
||||
MOONSTREAM_AWS_SIGNER_INSTANCE_PORT,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)

# Module-wide EC2 client used by the signing-instance management helpers below.
aws_client = boto3.client("ec2", region_name=AWS_DEFAULT_REGION)
|
||||
|
||||
|
||||
class AWSDescribeInstancesFail(Exception):
    """
    Raised when the AWS describe-instances command fails.
    """
|
||||
|
||||
|
||||
class AWSRunInstancesFail(Exception):
    """
    Raised when the AWS run-instances command fails.
    """
|
||||
|
||||
|
||||
class AWSTerminateInstancesFail(Exception):
    """
    Raised when the AWS terminate-instances command fails.
    """
|
||||
|
||||
|
||||
class SigningInstancesNotFound(Exception):
    """
    Raised when signing instances with the given ids are not found at AWS.
    """
|
||||
|
||||
|
||||
class SigningInstancesTerminationLimitExceeded(Exception):
    """
    Raised when more than one instance is provided for termination.
    """
|
||||
|
||||
|
||||
class SignWithInstanceFail(Exception):
    """
    Raised when signing a message via the instance server fails.
    """
|
||||
|
||||
|
||||
class Signer:
    """
    Abstract interface for message signers.

    NOTE: this class does not inherit abc.ABC, so the @abc.abstractmethod
    decorators are not enforced at instantiation time; subclasses here
    (AccountSigner) intentionally implement only a subset of the methods.
    """

    @abc.abstractmethod
    def sign_message(self, message):
        # Sign a single message; implementations return the signature as a hex string.
        pass

    @abc.abstractmethod
    def refresh_signer(self):
        # Re-discover / re-acquire the signing backend.
        pass

    @abc.abstractmethod
    def batch_sign_message(self, messages_list):
        # Sign each message in messages_list; implementations return a
        # mapping of message -> signature.
        pass
|
||||
|
||||
|
||||
class AccountSigner(Signer):
    """
    Signer backed by a local Ethereum private key.

    Signs message hashes directly with eth_keys; no external signing
    service is involved.
    """

    def __init__(self, private_key: HexBytes) -> None:
        self.private_key = private_key

    @staticmethod
    def _sign_hash(eth_private_key, message) -> str:
        # Sign one message hash and return the signature as a hex string.
        _, _, _, signed_message_bytes = sign_message_hash(
            eth_private_key, HexBytes(message)
        )
        return signed_message_bytes.hex()

    def sign_message(self, message):
        return self._sign_hash(eth_keys.keys.PrivateKey(self.private_key), message)

    def batch_sign_message(self, messages_list: List[str]):
        # Parse the private key once for the whole batch; the previous
        # implementation rebuilt it on every loop iteration.
        eth_private_key = eth_keys.keys.PrivateKey(self.private_key)
        # NOTE(review): despite the List[str] annotation, message.hex() implies
        # the messages are bytes-like (HexBytes) — confirm against callers.
        return {
            message.hex(): self._sign_hash(eth_private_key, message)
            for message in messages_list
        }
|
||||
|
||||
|
||||
def create_account_signer(keystore: str, password: str) -> AccountSigner:
    """
    Build an AccountSigner from an encrypted keystore file.

    Reads the JSON keystore at `keystore`, decrypts it with `password`,
    and wraps the resulting private key in an AccountSigner.
    """
    with open(keystore) as keystore_file:
        keystore_data = json.load(keystore_file)
        private_key = Account.decrypt(keystore_data, password)
        signer = AccountSigner(private_key)
    return signer
|
||||
|
||||
|
||||
class InstanceSigner(Signer):
    """
    Signer that delegates signing to a dedicated AWS signing server instance
    over HTTP (/sign and /batchsign endpoints).
    """

    def __init__(self, ip: Optional[str] = None) -> None:
        # Always initialize BOTH URIs. Previously current_signer_batch_uri was
        # only assigned when ip was provided, leaving the attribute undefined
        # (AttributeError risk) for InstanceSigner(None).
        self.current_signer_uri: Optional[str] = None
        self.current_signer_batch_uri: Optional[str] = None
        if ip is not None:
            self.current_signer_uri = (
                f"http://{ip}:{MOONSTREAM_AWS_SIGNER_INSTANCE_PORT}/sign"
            )
            self.current_signer_batch_uri = (
                f"http://{ip}:{MOONSTREAM_AWS_SIGNER_INSTANCE_PORT}/batchsign"
            )

    def clean_signer(self) -> None:
        # Forget the current signing server; the next sign call refreshes it.
        self.current_signer_uri = None
        self.current_signer_batch_uri = None

    def refresh_signer(self) -> None:
        """
        Discover the single running signing instance and point both URIs at it.

        Raises SignWithInstanceFail unless exactly one instance is found.
        """
        try:
            instances = list_signing_instances([])
        except AWSDescribeInstancesFail:
            raise AWSDescribeInstancesFail("AWS describe instances command failed")
        except Exception as err:
            logger.error(f"AWS describe instances command failed: {err}")
            raise SignWithInstanceFail("AWS describe instances command failed")

        if len(instances) != 1:
            raise SignWithInstanceFail("Unsupported number of signing instances")

        ip = instances[0]["private_ip_address"]
        self.current_signer_uri = (
            f"http://{ip}:{MOONSTREAM_AWS_SIGNER_INSTANCE_PORT}/sign"
        )
        self.current_signer_batch_uri = (
            f"http://{ip}:{MOONSTREAM_AWS_SIGNER_INSTANCE_PORT}/batchsign"
        )

    @staticmethod
    def _normalize_signature(signed_message: str) -> str:
        # Hack as per: https://medium.com/@yaoshiang/ethereums-ecrecover-openzeppelin-s-ecdsa-and-web3-s-sign-8ff8d16595e1
        # Normalize the recovery id (v) from {00, 01} to {1b, 1c}; this logic
        # was previously duplicated in sign_message and batch_sign_message.
        signature = signed_message[2:]
        if signature[-2:] == "00":
            return f"{signature[:-2]}1b"
        if signature[-2:] == "01":
            return f"{signature[:-2]}1c"
        raise SignWithInstanceFail(
            f"Unexpected v-value on signed message: {signed_message[-2:]}"
        )

    def sign_message(self, message: str):
        # TODO(kompotkot): What to do if self.current_signer_uri is not None but the signing server went down?
        if self.current_signer_uri is None:
            self.refresh_signer()

        try:
            resp = requests.post(
                self.current_signer_uri,
                headers={"Content-Type": "application/json"},
                json={"unsigned_data": str(message)},
            )
            resp.raise_for_status()
            signed_message = resp.json()["signed_data"]
        except Exception as err:
            logger.error(f"Failed signing of message with instance server, {err}")
            raise SignWithInstanceFail("Failed signing of message with instance server")

        return self._normalize_signature(signed_message)

    def batch_sign_message(self, messages_list: List[str]):
        if self.current_signer_uri is None:
            self.refresh_signer()

        try:
            resp = requests.post(
                self.current_signer_batch_uri,
                headers={"Content-Type": "application/json"},
                json={"unsigned_data": [str(message) for message in messages_list]},
            )
            resp.raise_for_status()
            signed_messages = resp.json()["signed_data"]
        except Exception as err:
            logger.error(f"Failed signing of message with instance server, {err}")
            raise SignWithInstanceFail("Failed signing of message with instance server")

        return {
            unsigned_message: self._normalize_signature(signed_message)
            for unsigned_message, signed_message in signed_messages.items()
        }
|
||||
|
||||
|
||||
# Module-level signer used for drops. Prefer a local keystore signer when both
# SIGNER_KEYSTORE and SIGNER_PASSWORD are configured; otherwise fall back to
# the AWS signing server instance (which lazily refreshes its target).
DROP_SIGNER: Optional[Signer] = None
if SIGNER_KEYSTORE is not None and SIGNER_PASSWORD is not None:
    DROP_SIGNER = create_account_signer(SIGNER_KEYSTORE, SIGNER_PASSWORD)
if DROP_SIGNER is None:
    DROP_SIGNER = InstanceSigner(MOONSTREAM_SIGNING_SERVER_IP)
|
||||
|
||||
|
||||
def list_signing_instances(
    signing_instances: List[str],
) -> List[Any]:
    """
    Describe the signing instances with the given ids and return, for each,
    its instance id and private IP address.

    Raises AWSDescribeInstancesFail if the AWS call (or response parsing) fails.
    """
    try:
        response = aws_client.describe_instances(
            Filters=[
                {"Name": "image-id", "Values": [MOONSTREAM_AWS_SIGNER_IMAGE_ID]},
                {"Name": "tag:Application", "Values": ["signer"]},
            ],
            InstanceIds=signing_instances,
        )
        described_instances = [
            {
                "instance_id": instance["InstanceId"],
                "private_ip_address": instance["PrivateIpAddress"],
            }
            for reservation in response["Reservations"]
            for instance in reservation["Instances"]
        ]
    except Exception as err:
        logger.error(f"AWS describe instances command failed: {err}")
        raise AWSDescribeInstancesFail("AWS describe instances command failed.")

    return described_instances
|
||||
|
||||
|
||||
def wakeup_signing_instances(run_confirmed=False, dry_run=True) -> List[str]:
    """
    Launch a fresh signing instance from the configured launch template.

    Returns the ids of launched instances; returns an empty list unless
    run_confirmed is True. Raises AWSRunInstancesFail on AWS errors.
    """
    if not run_confirmed:
        return []

    try:
        response = aws_client.run_instances(
            LaunchTemplate={
                "LaunchTemplateId": MOONSTREAM_AWS_SIGNER_LAUNCH_TEMPLATE_ID
            },
            MinCount=1,
            MaxCount=1,
            DryRun=dry_run,
        )
        return [instance["InstanceId"] for instance in response["Instances"]]
    except Exception as err:
        logger.error(f"AWS run instances command failed: {err}")
        raise AWSRunInstancesFail("AWS run instances command failed")
|
||||
|
||||
|
||||
def sleep_signing_instances(
    signing_instances: List[str], termination_confirmed=False, dry_run=True
) -> List[str]:
    """
    Verify that the given signing instances exist at AWS and terminate them.

    Exactly one matching instance must be found. Termination only happens
    when termination_confirmed is True; otherwise an empty list is returned.
    """
    if not signing_instances:
        raise SigningInstancesNotFound("There are no signing instances to describe")

    try:
        described = list_signing_instances(signing_instances)
        described_instances = [instance["instance_id"] for instance in described]
    except Exception as err:
        logger.error(f"AWS describe instances command failed: {err}")
        raise AWSDescribeInstancesFail("AWS describe instances command failed.")

    if not described_instances:
        raise SigningInstancesNotFound(
            "Signing instances with the given ids is not found in at AWS."
        )
    if len(described_instances) > 1:
        raise SigningInstancesTerminationLimitExceeded(
            f"Provided {len(described_instances)} instances to termination"
        )

    if not termination_confirmed:
        return []

    try:
        termination_response = aws_client.terminate_instances(
            InstanceIds=described_instances,
            DryRun=dry_run,
        )
        return [
            instance["InstanceId"]
            for instance in termination_response["TerminatingInstances"]
        ]
    except Exception as err:
        logger.error(
            f"Unable to terminate instance {described_instances}, error: {err}"
        )
        raise AWSTerminateInstancesFail("AWS terminate instances command failed")
|
|
@ -0,0 +1,55 @@
|
|||
import time
|
||||
import unittest
|
||||
|
||||
from brownie import network, accounts
|
||||
from hexbytes import HexBytes
|
||||
|
||||
from .auth import (
|
||||
authorize,
|
||||
verify,
|
||||
MoonstreamAuthorizationVerificationError,
|
||||
MoonstreamAuthorizationExpired,
|
||||
)
|
||||
|
||||
|
||||
class TestAuth(unittest.TestCase):
    """
    Tests for authorization payload generation (authorize) and verification
    (verify), including failure on a tampered address and on expiry.
    """

    @classmethod
    def setUpClass(cls):
        # Best-effort: connecting may fail (e.g. a network is already active);
        # the bare "except:" was narrowed so KeyboardInterrupt still propagates.
        try:
            network.connect()
        except Exception:
            pass
        cls.signer = accounts.add()
        cls.non_signer = accounts.add()

    def test_authorization_and_verification(self):
        current_time = int(time.time())
        payload = authorize(
            current_time + 300, self.signer.address, HexBytes(self.signer.private_key)
        )
        # assertDictContainsSubset was removed in Python 3.12; assert the
        # expected keys explicitly instead.
        self.assertEqual(payload["address"], self.signer.address)
        self.assertEqual(payload["deadline"], current_time + 300)
        self.assertTrue(verify(payload))

    def test_authorization_and_verification_fails_for_wrong_address(self):
        current_time = int(time.time())
        payload = authorize(
            current_time + 300, self.signer.address, HexBytes(self.signer.private_key)
        )
        payload["address"] = self.non_signer.address
        with self.assertRaises(MoonstreamAuthorizationVerificationError):
            verify(payload)

    def test_authorization_and_verification_fails_after_deadline(self):
        current_time = int(time.time())
        payload = authorize(
            current_time - 1, self.signer.address, HexBytes(self.signer.private_key)
        )
        with self.assertRaises(MoonstreamAuthorizationExpired):
            verify(payload)
|
||||
|
||||
|
||||
# Allow running this test module directly with `python <file>.py`.
if __name__ == "__main__":
    unittest.main()
|
|
@ -0,0 +1,11 @@
|
|||
import os

# Package version, read from the version.txt file shipped next to this module.
# Falls back to "UNKNOWN" when the file is missing or unreadable.
VERSION = "UNKNOWN"

try:
    PATH = os.path.abspath(os.path.dirname(__file__))
    VERSION_FILE = os.path.join(PATH, "version.txt")
    with open(VERSION_FILE) as ifp:
        VERSION = ifp.read().strip()
except Exception:
    # Deliberately best-effort, but narrowed from a bare "except:" so that
    # KeyboardInterrupt/SystemExit are no longer swallowed.
    pass
|
|
@ -0,0 +1 @@
|
|||
0.0.4
|
|
@ -0,0 +1,76 @@
|
|||
aiohttp==3.8.3
|
||||
aiosignal==1.2.0
|
||||
alembic==1.8.1
|
||||
anyio==3.6.2
|
||||
async-timeout==4.0.2
|
||||
attrs==22.1.0
|
||||
base58==2.1.1
|
||||
bitarray==2.6.0
|
||||
black==22.10.0
|
||||
boto3==1.24.93
|
||||
botocore==1.27.93
|
||||
Brownie==0.5.1
|
||||
bugout==0.2.2
|
||||
certifi==2022.9.24
|
||||
charset-normalizer==2.1.1
|
||||
click==8.1.3
|
||||
cytoolz==0.12.0
|
||||
dataclassy==0.11.1
|
||||
eip712==0.1.0
|
||||
eth-abi==2.2.0
|
||||
eth-account==0.5.9
|
||||
eth-hash==0.5.0
|
||||
eth-keyfile==0.5.1
|
||||
eth-keys==0.3.4
|
||||
eth-rlp==0.2.1
|
||||
eth-typing==2.3.0
|
||||
eth-utils==1.9.5
|
||||
fastapi==0.85.1
|
||||
frozenlist==1.3.1
|
||||
greenlet==1.1.3.post0
|
||||
h11==0.14.0
|
||||
hexbytes==0.2.3
|
||||
idna==3.4
|
||||
importlib-metadata==5.0.0
|
||||
importlib-resources==5.10.0
|
||||
ipfshttpclient==0.8.0a2
|
||||
isort==5.10.1
|
||||
jmespath==1.0.1
|
||||
jsonschema==4.16.0
|
||||
lru-dict==1.1.8
|
||||
Mako==1.2.3
|
||||
MarkupSafe==2.1.1
|
||||
multiaddr==0.0.9
|
||||
multidict==6.0.2
|
||||
mypy==0.982
|
||||
mypy-extensions==0.4.3
|
||||
netaddr==0.8.0
|
||||
parsimonious==0.8.1
|
||||
pathspec==0.10.1
|
||||
pkgutil_resolve_name==1.3.10
|
||||
platformdirs==2.5.2
|
||||
protobuf==3.19.5
|
||||
psycopg2-binary==2.9.4
|
||||
pycryptodome==3.15.0
|
||||
pydantic==1.10.2
|
||||
pyrsistent==0.18.1
|
||||
python-dateutil==2.8.2
|
||||
requests==2.28.1
|
||||
rlp==2.0.1
|
||||
s3transfer==0.6.0
|
||||
six==1.16.0
|
||||
sniffio==1.3.0
|
||||
SQLAlchemy==1.4.42
|
||||
starlette==0.20.4
|
||||
tabulate==0.9.0
|
||||
tomli==2.0.1
|
||||
toolz==0.12.0
|
||||
tqdm==4.64.1
|
||||
typing_extensions==4.4.0
|
||||
urllib3==1.26.12
|
||||
uvicorn==0.18.3
|
||||
varint==1.0.2
|
||||
web3==5.31.1
|
||||
websockets==9.1
|
||||
yarl==1.8.1
|
||||
zipp==3.9.0
|
|
@ -0,0 +1,51 @@
|
|||
from setuptools import find_packages, setup

# Single-source the package version from engineapi/version.txt (the same file
# engineapi.version reads at runtime).
with open("engineapi/version.txt") as ifp:
    VERSION = ifp.read().strip()

long_description = ""
with open("README.md") as ifp:
    long_description = ifp.read()

setup(
    name="engineapi",
    version=VERSION,
    packages=find_packages(),
    install_requires=[
        "boto3",
        "bugout>=0.2.2",
        "eip712==0.1.0",
        "eth-typing>=2.3.0",
        "fastapi",
        "psycopg2-binary",
        "pydantic",
        "sqlalchemy",
        "tqdm",
        "uvicorn",
        "web3>=5.30.0, <6",
        "tabulate",
    ],
    extras_require={
        "dev": ["alembic", "black", "mypy", "isort"],
        "distribute": ["setuptools", "twine", "wheel"],
    },
    description="Command line interface for Moonstream Engine API",
    long_description=long_description,
    long_description_content_type="text/markdown",
    author="Moonstream",
    author_email="engineering@moonstream.to",
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Programming Language :: Python",
        "License :: OSI Approved :: Apache Software License",
        "Topic :: Software Development :: Libraries",
    ],
    python_requires=">=3.8",
    entry_points={
        "console_scripts": [
            "engineapi=engineapi.cli:main",
        ]
    },
    # Ship bundled contract ABIs with the package.
    package_data={"engineapi": ["contracts/*.json"]},
    include_package_data=True,
)
|
Ładowanie…
Reference in New Issue