diff --git a/crawlers/deploy/deploy.bash b/crawlers/deploy/deploy.bash
index e46b719e..51889cae 100755
--- a/crawlers/deploy/deploy.bash
+++ b/crawlers/deploy/deploy.bash
@@ -47,6 +47,8 @@ POLYGON_TXPOOL_SERVICE_FILE="polygon-txpool.service"
POLYGON_MOONWORM_CRAWLER_SERVICE_FILE="polygon-moonworm-crawler.service"
POLYGON_STATE_SERVICE_FILE="polygon-state.service"
POLYGON_STATE_TIMER_FILE="polygon-state.timer"
+POLYGON_STATE_CLEAN_SERVICE_FILE="polygon-state-clean.service"
+POLYGON_STATE_CLEAN_TIMER_FILE="polygon-state-clean.timer"
POLYGON_METADATA_SERVICE_FILE="polygon-metadata.service"
POLYGON_METADATA_TIMER_FILE="polygon-metadata.timer"
@@ -226,6 +228,15 @@ cp "${SCRIPT_DIR}/${POLYGON_STATE_TIMER_FILE}" "/etc/systemd/system/${POLYGON_ST
systemctl daemon-reload
systemctl restart --no-block "${POLYGON_STATE_TIMER_FILE}"
+echo
+echo
+echo -e "${PREFIX_INFO} Replacing existing Polygon state clean service and timer with: ${POLYGON_STATE_CLEAN_SERVICE_FILE}, ${POLYGON_STATE_CLEAN_TIMER_FILE}"
+chmod 644 "${SCRIPT_DIR}/${POLYGON_STATE_CLEAN_SERVICE_FILE}" "${SCRIPT_DIR}/${POLYGON_STATE_CLEAN_TIMER_FILE}"
+cp "${SCRIPT_DIR}/${POLYGON_STATE_CLEAN_SERVICE_FILE}" "/etc/systemd/system/${POLYGON_STATE_CLEAN_SERVICE_FILE}"
+cp "${SCRIPT_DIR}/${POLYGON_STATE_CLEAN_TIMER_FILE}" "/etc/systemd/system/${POLYGON_STATE_CLEAN_TIMER_FILE}"
+systemctl daemon-reload
+systemctl restart --no-block "${POLYGON_STATE_CLEAN_TIMER_FILE}"
+
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Polygon metadata service and timer with: ${POLYGON_METADATA_SERVICE_FILE}, ${POLYGON_METADATA_TIMER_FILE}"
diff --git a/crawlers/deploy/ethereum-missing.timer b/crawlers/deploy/ethereum-missing.timer
index 87fc6c16..44b09a06 100644
--- a/crawlers/deploy/ethereum-missing.timer
+++ b/crawlers/deploy/ethereum-missing.timer
@@ -2,7 +2,7 @@
Description=Fill missing blocks at Ethereum database
[Timer]
-OnBootSec=10s
+OnBootSec=40s
OnUnitActiveSec=15m
[Install]
diff --git a/crawlers/deploy/ethereum-trending.timer b/crawlers/deploy/ethereum-trending.timer
index 511be4ee..6dd8c93c 100644
--- a/crawlers/deploy/ethereum-trending.timer
+++ b/crawlers/deploy/ethereum-trending.timer
@@ -2,7 +2,7 @@
Description=Load trending Ethereum addresses to the database every 5 minutes
[Timer]
-OnBootSec=10s
+OnBootSec=60s
OnUnitActiveSec=5m
[Install]
diff --git a/crawlers/deploy/polygon-metadata.timer b/crawlers/deploy/polygon-metadata.timer
index 6c91fb30..d1ebe9d0 100644
--- a/crawlers/deploy/polygon-metadata.timer
+++ b/crawlers/deploy/polygon-metadata.timer
@@ -2,7 +2,7 @@
Description=Execute Polygon metadata crawler each 10m
[Timer]
-OnBootSec=10s
+OnBootSec=20s
OnUnitActiveSec=60m
[Install]
diff --git a/crawlers/deploy/polygon-missing.timer b/crawlers/deploy/polygon-missing.timer
index 9fd8a52e..58eb0ce3 100644
--- a/crawlers/deploy/polygon-missing.timer
+++ b/crawlers/deploy/polygon-missing.timer
@@ -2,7 +2,7 @@
Description=Fill missing blocks at Polygon database
[Timer]
-OnBootSec=10s
+OnBootSec=30s
OnUnitActiveSec=15m
[Install]
diff --git a/crawlers/deploy/polygon-state-clean.service b/crawlers/deploy/polygon-state-clean.service
new file mode 100644
index 00000000..857bea42
--- /dev/null
+++ b/crawlers/deploy/polygon-state-clean.service
@@ -0,0 +1,13 @@
+[Unit]
+Description=Execute state clean labels crawler
+After=network.target
+
+[Service]
+Type=oneshot
+User=ubuntu
+Group=www-data
+WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
+EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
+ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" clean-state-labels --blockchain polygon -N 10000
+CPUWeight=60
+SyslogIdentifier=polygon-state-clean
diff --git a/crawlers/deploy/polygon-state-clean.timer b/crawlers/deploy/polygon-state-clean.timer
new file mode 100644
index 00000000..cfb37c0c
--- /dev/null
+++ b/crawlers/deploy/polygon-state-clean.timer
@@ -0,0 +1,9 @@
+[Unit]
+Description=Execute Polygon state clean labels crawler each 25m
+
+[Timer]
+OnBootSec=50s
+OnUnitActiveSec=25m
+
+[Install]
+WantedBy=timers.target
diff --git a/crawlers/deploy/polygon-state.timer b/crawlers/deploy/polygon-state.timer
index a6f7911c..76ea17be 100644
--- a/crawlers/deploy/polygon-state.timer
+++ b/crawlers/deploy/polygon-state.timer
@@ -2,7 +2,7 @@
Description=Execute Polygon state crawler each 10m
[Timer]
-OnBootSec=10s
+OnBootSec=15s
OnUnitActiveSec=10m
[Install]
diff --git a/crawlers/deploy/polygon-statistics.timer b/crawlers/deploy/polygon-statistics.timer
index 6cf3f020..0cab38da 100644
--- a/crawlers/deploy/polygon-statistics.timer
+++ b/crawlers/deploy/polygon-statistics.timer
@@ -2,7 +2,7 @@
Description=Update Polygon statistics dashboards each 6 hours
[Timer]
-OnBootSec=10s
+OnBootSec=20s
OnUnitActiveSec=6h
[Install]
diff --git a/crawlers/deploy/xdai-missing.timer b/crawlers/deploy/xdai-missing.timer
index a882c4fd..66285c66 100644
--- a/crawlers/deploy/xdai-missing.timer
+++ b/crawlers/deploy/xdai-missing.timer
@@ -2,7 +2,7 @@
Description=Fill missing blocks at XDai database
[Timer]
-OnBootSec=10s
+OnBootSec=35s
OnUnitActiveSec=15m
[Install]
diff --git a/crawlers/deploy/xdai-statistics.timer b/crawlers/deploy/xdai-statistics.timer
index 8646d085..8ef61b51 100644
--- a/crawlers/deploy/xdai-statistics.timer
+++ b/crawlers/deploy/xdai-statistics.timer
@@ -2,7 +2,7 @@
Description=Update XDai statistics dashboards each 6 hours
[Timer]
-OnBootSec=10s
+OnBootSec=25s
OnUnitActiveSec=6h
[Install]
diff --git a/crawlers/mooncrawl/mooncrawl/moonworm_crawler/cli.py b/crawlers/mooncrawl/mooncrawl/moonworm_crawler/cli.py
index 43606f45..98900c28 100644
--- a/crawlers/mooncrawl/mooncrawl/moonworm_crawler/cli.py
+++ b/crawlers/mooncrawl/mooncrawl/moonworm_crawler/cli.py
@@ -119,7 +119,9 @@ def handle_historical_crawl(args: argparse.Namespace) -> None:
blockchain_type = AvailableBlockchainType(args.blockchain_type)
subscription_type = blockchain_type_to_subscription_type(blockchain_type)
- addresses_filter = [args.address]
+ addresses_filter = []
+ if args.address is not None:
+ addresses_filter = [args.address]
all_event_jobs = make_event_crawl_jobs(
get_crawl_job_entries(
subscription_type,
@@ -129,9 +131,12 @@ def handle_historical_crawl(args: argparse.Namespace) -> None:
)
filtered_event_jobs = []
for job in all_event_jobs:
- intersection = [
- address for address in job.contracts if address in addresses_filter
- ]
+ if addresses_filter:
+ intersection = [
+ address for address in job.contracts if address in addresses_filter
+ ]
+ else:
+ intersection = job.contracts
if intersection:
job.contracts = intersection
filtered_event_jobs.append(job)
@@ -145,11 +150,14 @@ def handle_historical_crawl(args: argparse.Namespace) -> None:
MOONSTREAM_MOONWORM_TASKS_JOURNAL,
)
)
- filtered_function_call_jobs = [
- job
- for job in all_function_call_jobs
- if job.contract_address in addresses_filter
- ]
+ if addresses_filter:
+ filtered_function_call_jobs = [
+ job
+ for job in all_function_call_jobs
+ if job.contract_address in addresses_filter
+ ]
+ else:
+ filtered_function_call_jobs = all_function_call_jobs
if args.only_events:
filtered_function_call_jobs = []
@@ -336,7 +344,7 @@ def main() -> None:
historical_crawl_parser.add_argument(
"--address",
"-a",
- required=True,
+ required=False,
type=str,
)
historical_crawl_parser.add_argument(
diff --git a/crawlers/mooncrawl/mooncrawl/moonworm_crawler/event_crawler.py b/crawlers/mooncrawl/mooncrawl/moonworm_crawler/event_crawler.py
index c6330c68..ddbbb662 100644
--- a/crawlers/mooncrawl/mooncrawl/moonworm_crawler/event_crawler.py
+++ b/crawlers/mooncrawl/mooncrawl/moonworm_crawler/event_crawler.py
@@ -1,9 +1,9 @@
import logging
from dataclasses import dataclass
-from typing import Any, Dict, List, Optional
+from typing import Any, Dict, List, Optional, Tuple
from moonstreamdb.blockchain import AvailableBlockchainType, get_block_model
-from moonworm.crawler.log_scanner import _fetch_events_chunk # type: ignore
+from moonworm.crawler.log_scanner import _fetch_events_chunk, _crawl_events as moonworm_autoscale_crawl_events # type: ignore
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import and_
from web3 import Web3
@@ -68,11 +68,11 @@ def get_block_timestamp(
block_model = get_block_model(blockchain_type)
blocks = (
- db_session.query(block_model)
+ db_session.query(block_model.block_number, block_model.timestamp)
.filter(
and_(
- block_model.block_number >= block_number,
- block_model.block_number <= block_number + max_blocks_batch - 1,
+ block_model.block_number >= block_number - max_blocks_batch - 1,
+ block_model.block_number <= block_number + max_blocks_batch + 1,
)
)
.order_by(block_model.block_number.asc())
@@ -86,7 +86,7 @@ def get_block_timestamp(
if target_block_timestamp is None:
target_block_timestamp = _get_block_timestamp_from_web3(web3, block_number)
- if len(blocks_cache) > max_blocks_batch * 2:
+ if len(blocks_cache) > (max_blocks_batch * 3 + 2):
blocks_cache.clear()
blocks_cache[block_number] = target_block_timestamp
@@ -139,3 +139,52 @@ def _crawl_events(
all_events.append(event)
return all_events
+
+
+def _autoscale_crawl_events(
+ db_session: Session,
+ blockchain_type: AvailableBlockchainType,
+ web3: Web3,
+ jobs: List[EventCrawlJob],
+ from_block: int,
+ to_block: int,
+ blocks_cache: Dict[int, int] = {},
+ batch_size: int = 1000,
+ db_block_query_batch=10,
+) -> Tuple[List[Event], int]:
+
+ """
+ Crawl events with auto regulated batch_size.
+ """
+ all_events = []
+ for job in jobs:
+
+ raw_events, batch_size = moonworm_autoscale_crawl_events(
+ web3,
+ job.event_abi,
+ from_block,
+ to_block,
+ batch_size,
+ job.contracts[0],
+ )
+ for raw_event in raw_events:
+ raw_event["blockTimestamp"] = get_block_timestamp(
+ db_session,
+ web3,
+ blockchain_type,
+ raw_event["blockNumber"],
+ blocks_cache,
+ db_block_query_batch,
+ )
+ event = Event(
+ event_name=raw_event["event"],
+ args=raw_event["args"],
+ address=raw_event["address"],
+ block_number=raw_event["blockNumber"],
+ block_timestamp=raw_event["blockTimestamp"],
+ transaction_hash=raw_event["transactionHash"],
+ log_index=raw_event["logIndex"],
+ )
+ all_events.append(event)
+
+ return all_events, batch_size
diff --git a/crawlers/mooncrawl/mooncrawl/moonworm_crawler/function_call_crawler.py b/crawlers/mooncrawl/mooncrawl/moonworm_crawler/function_call_crawler.py
index 8ee09df9..cf08c52a 100644
--- a/crawlers/mooncrawl/mooncrawl/moonworm_crawler/function_call_crawler.py
+++ b/crawlers/mooncrawl/mooncrawl/moonworm_crawler/function_call_crawler.py
@@ -10,7 +10,7 @@ from moonworm.crawler.moonstream_ethereum_state_provider import ( # type: ignor
MoonstreamEthereumStateProvider,
)
from moonworm.crawler.networks import Network # type: ignore
-from moonworm.cu_watch import MockState # type: ignore
+from moonworm.watch import MockState # type: ignore
from sqlalchemy.orm import Session
from web3 import Web3
diff --git a/crawlers/mooncrawl/mooncrawl/moonworm_crawler/historical_crawler.py b/crawlers/mooncrawl/mooncrawl/moonworm_crawler/historical_crawler.py
index 852aa7c0..bae5190e 100644
--- a/crawlers/mooncrawl/mooncrawl/moonworm_crawler/historical_crawler.py
+++ b/crawlers/mooncrawl/mooncrawl/moonworm_crawler/historical_crawler.py
@@ -13,7 +13,7 @@ from web3 import Web3
from .crawler import EventCrawlJob, FunctionCallCrawlJob, _retry_connect_web3
from .db import add_events_to_session, add_function_calls_to_session, commit_session
-from .event_crawler import _crawl_events
+from .event_crawler import _crawl_events, _autoscale_crawl_events
from .function_call_crawler import _crawl_functions
logging.basicConfig(level=logging.INFO)
@@ -71,26 +71,41 @@ def historical_crawler(
)
logger.info(f"Crawling events from {start_block} to {batch_end_block}")
- all_events = _crawl_events(
- db_session=db_session,
- blockchain_type=blockchain_type,
- web3=web3,
- jobs=event_crawl_jobs,
- from_block=batch_end_block,
- to_block=start_block,
- blocks_cache=blocks_cache,
- db_block_query_batch=max_blocks_batch,
- )
+
+ if function_call_crawl_jobs:
+ all_events = _crawl_events(
+ db_session=db_session,
+ blockchain_type=blockchain_type,
+ web3=web3,
+ jobs=event_crawl_jobs,
+ from_block=batch_end_block,
+ to_block=start_block,
+ blocks_cache=blocks_cache,
+ db_block_query_batch=max_blocks_batch,
+ )
+
+ else:
+
+ all_events, max_blocks_batch = _autoscale_crawl_events(
+ db_session=db_session,
+ blockchain_type=blockchain_type,
+ web3=web3,
+ jobs=event_crawl_jobs,
+ from_block=batch_end_block,
+ to_block=start_block,
+ blocks_cache=blocks_cache,
+ db_block_query_batch=max_blocks_batch,
+ )
logger.info(
f"Crawled {len(all_events)} events from {start_block} to {batch_end_block}."
)
add_events_to_session(db_session, all_events, blockchain_type)
- logger.info(
- f"Crawling function calls from {start_block} to {batch_end_block}"
- )
if function_call_crawl_jobs:
+ logger.info(
+ f"Crawling function calls from {start_block} to {batch_end_block}"
+ )
all_function_calls = _crawl_functions(
blockchain_type,
ethereum_state_provider,
diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py
index 8b86d089..b3b70842 100644
--- a/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py
+++ b/crawlers/mooncrawl/mooncrawl/state_crawler/cli.py
@@ -15,7 +15,7 @@ from moonstreamdb.db import (
)
from sqlalchemy.orm import sessionmaker
-from .db import view_call_to_label, commit_session
+from .db import view_call_to_label, commit_session, clean_labels
from .Multicall2_interface import Contract as Multicall2
from ..settings import (
NB_CONTROLLER_ACCESS_ID,
@@ -395,6 +395,33 @@ def parse_abi(args: argparse.Namespace) -> None:
json.dump(output_json, f)
+def clean_labels_handler(args: argparse.Namespace) -> None:
+
+ blockchain_type = AvailableBlockchainType(args.blockchain)
+
+ web3_client = _retry_connect_web3(
+ blockchain_type=blockchain_type, access_id=args.access_id
+ )
+
+ logger.info(f"Label cleaner connected to blockchain: {blockchain_type}")
+
+ block_number = web3_client.eth.get_block("latest").number # type: ignore
+
+ engine = create_moonstream_engine(
+ MOONSTREAM_DB_URI,
+ pool_pre_ping=True,
+ pool_size=MOONSTREAM_POOL_SIZE,
+ statement_timeout=MOONSTREAM_STATE_CRAWLER_DB_STATEMENT_TIMEOUT_MILLIS,
+ )
+ process_session = sessionmaker(bind=engine)
+ db_session = process_session()
+
+ try:
+ clean_labels(db_session, blockchain_type, args.blocks_cutoff, block_number)
+ finally:
+ db_session.close()
+
+
def main() -> None:
parser = argparse.ArgumentParser()
parser.set_defaults(func=lambda _: parser.print_help())
@@ -431,6 +458,26 @@ def main() -> None:
)
view_state_crawler_parser.set_defaults(func=handle_crawl)
+ view_state_cleaner = subparsers.add_parser(
+ "clean-state-labels",
+ help="Clean labels from database",
+ )
+ view_state_cleaner.add_argument(
+ "--blockchain",
+ "-b",
+ type=str,
+        help="Type of blockchain which is written to the database",
+ required=True,
+ )
+ view_state_cleaner.add_argument(
+ "--blocks-cutoff",
+ "-N",
+ required=True,
+ type=int,
+        help="Number of blocks back after which data will be removed.",
+ )
+ view_state_cleaner.set_defaults(func=clean_labels_handler)
+
generate_view_parser = subparsers.add_parser(
"parse-abi",
help="Parse view methods from the abi file.",
diff --git a/crawlers/mooncrawl/mooncrawl/state_crawler/db.py b/crawlers/mooncrawl/mooncrawl/state_crawler/db.py
index e2255a49..fd0d70e8 100644
--- a/crawlers/mooncrawl/mooncrawl/state_crawler/db.py
+++ b/crawlers/mooncrawl/mooncrawl/state_crawler/db.py
@@ -59,3 +59,35 @@ def commit_session(db_session: Session) -> None:
logger.error(f"Failed to save labels: {e}")
db_session.rollback()
raise e
+
+
+def clean_labels(
+ db_session: Session,
+ blockchain_type: AvailableBlockchainType,
+ blocks_cutoff: int,
+ block_number: int,
+) -> None:
+ """
+    Remove state crawler labels older than the given block cutoff from the database.
+ """
+
+ label_model = get_label_model(blockchain_type)
+
+ table = label_model.__tablename__
+ print(f"Cleaning labels from table {table}")
+ print(f"Current block number: {block_number} - blocks cutoff: {blocks_cutoff}")
+ print(f"Deleting labels with block_number < {block_number - blocks_cutoff}")
+
+ try:
+ logger.info("Removing labels from database")
+ query = db_session.query(label_model).filter(
+ label_model.label == VIEW_STATE_CRAWLER_LABEL,
+ label_model.block_number < block_number - blocks_cutoff,
+ )
+ result = query.delete(synchronize_session=False)
+ db_session.commit()
+ logger.info(f"Removed {result} rows from {table}")
+ except Exception as e:
+ logger.error(f"Failed to remove labels: {e}")
+ db_session.rollback()
+ raise e
diff --git a/crawlers/mooncrawl/mooncrawl/version.py b/crawlers/mooncrawl/mooncrawl/version.py
index 62b655e9..99d4f330 100644
--- a/crawlers/mooncrawl/mooncrawl/version.py
+++ b/crawlers/mooncrawl/mooncrawl/version.py
@@ -2,4 +2,4 @@
Moonstream crawlers version.
"""
-MOONCRAWL_VERSION = "0.2.3"
+MOONCRAWL_VERSION = "0.2.4"
diff --git a/crawlers/mooncrawl/setup.py b/crawlers/mooncrawl/setup.py
index 0820b64e..b6efe31f 100644
--- a/crawlers/mooncrawl/setup.py
+++ b/crawlers/mooncrawl/setup.py
@@ -38,7 +38,7 @@ setup(
"chardet",
"fastapi",
"moonstreamdb>=0.3.2",
- "moonworm==0.2.4",
+ "moonworm[moonstream]==0.5.1",
"humbug",
"pydantic",
"python-dateutil",
diff --git a/frontend/.eslintrc.json b/frontend/.eslintrc.json
index f2b28371..41faee4f 100644
--- a/frontend/.eslintrc.json
+++ b/frontend/.eslintrc.json
@@ -91,7 +91,16 @@
],
"react-hooks/rules-of-hooks": "error",
"react-hooks/exhaustive-deps": "warn",
- "prettier/prettier": "warn"
+ "prettier/prettier": "warn",
+ "react/no-unknown-property": [
+ 2,
+ {
+ "ignore": [
+ "jsx",
+ "global"
+ ]
+ }
+ ]
}
}
diff --git a/frontend/pages/contact/index.js b/frontend/pages/contact/index.js
index d9c9db26..5183640a 100644
--- a/frontend/pages/contact/index.js
+++ b/frontend/pages/contact/index.js
@@ -5,7 +5,7 @@ import HubspotForm from "react-hubspot-form";
import { useRouter } from "next/router";
import { BiArrowBack } from "react-icons/bi";
-const Contact = (props) => {
+const Contact = () => {
const router = useRouter();
const formId = "b54d192f-59b1-410a-8ac1-a1e8383c423c";
diff --git a/frontend/pages/team/index.js b/frontend/pages/team/index.js
index 47c130c3..eb57a5c8 100644
--- a/frontend/pages/team/index.js
+++ b/frontend/pages/team/index.js
@@ -4,6 +4,7 @@ import {
Text,
Flex,
Link,
+ Image as ChakraImage,
Stack,
chakra,
useMediaQuery,
@@ -11,31 +12,34 @@ import {
ListItem,
Box,
SimpleGrid,
+ Center,
} from "@chakra-ui/react";
-import { DEFAULT_METATAGS, AWS_ASSETS_PATH } from "../../src/core/constants";
+import { AWS_ASSETS_PATH } from "../../src/core/constants";
import UIContext from "../../src/core/providers/UIProvider/context";
import TeamCard from "../../src/components/TeamCard";
+import { getLayout, getLayoutProps } from "../../src/layouts/WideInfoPage";
+
+const TEAM_PATH = `${AWS_ASSETS_PATH}/team`;
const assets = {
background720: `${AWS_ASSETS_PATH}/blog-background-720x405.png`,
background1920: `${AWS_ASSETS_PATH}/blog-background-720x405.png`,
background2880: `${AWS_ASSETS_PATH}/blog-background-720x405.png`,
background3840: `${AWS_ASSETS_PATH}/blog-background-720x405.png`,
- team: `${AWS_ASSETS_PATH}/Team-page-illustration.png`,
- dragonfly: `${AWS_ASSETS_PATH}/dragonfly.jpg`,
- ladybird: `${AWS_ASSETS_PATH}/ladybird.jpg`,
- locust: `${AWS_ASSETS_PATH}/locust.jpg`,
- mantis: `${AWS_ASSETS_PATH}/mantis.jpg`,
- centipede: `${AWS_ASSETS_PATH}/centipede.jpg`,
- spider: `${AWS_ASSETS_PATH}/spider.jpg`,
- ant: `${AWS_ASSETS_PATH}/ant.jpg`,
- firefly: `${AWS_ASSETS_PATH}/firefly.jpg`,
- scarab: `${AWS_ASSETS_PATH}/scarab.jpg`,
- bee: `${AWS_ASSETS_PATH}/bee.jpg`,
- weta: `${AWS_ASSETS_PATH}/weta.jpg`,
+ rocket: `${TEAM_PATH}/rocket.png`,
+ ant: `${TEAM_PATH}/ant.png`,
+ bee: `${TEAM_PATH}/bee.png`,
+ centipede: `${TEAM_PATH}/centipede.png`,
+ firefly: `${TEAM_PATH}/firefly.png`,
+ ladybug: `${TEAM_PATH}/ladybug.png`,
+ locust: `${TEAM_PATH}/locust.png`,
+ mantis: `${TEAM_PATH}/mantis.png`,
+ scarab: `${TEAM_PATH}/scarab.png`,
+ spider: `${TEAM_PATH}/carpenter-spider.png`,
+ weta: `${TEAM_PATH}/weta.png`,
};
-const Product = () => {
+const Team = () => {
const ui = useContext(UIContext);
const [background, setBackground] = useState("background720");
const [backgroundLoaded720, setBackgroundLoaded720] = useState(false);
@@ -139,7 +143,7 @@ const Product = () => {
px={12}
alignItems="start"
columns={{ base: 1, md: 2 }}
- // mb={24}
+ mb={24}
spacingY={{ base: 10, md: 32 }}
spacingX={{ base: 10, md: 24 }}
>
@@ -165,14 +169,9 @@ const Product = () => {
-