kopia lustrzana https://github.com/bugout-dev/moonstream
commit
548a89331e
|
@ -47,6 +47,8 @@ POLYGON_TXPOOL_SERVICE_FILE="polygon-txpool.service"
|
|||
POLYGON_MOONWORM_CRAWLER_SERVICE_FILE="polygon-moonworm-crawler.service"
|
||||
POLYGON_STATE_SERVICE_FILE="polygon-state.service"
|
||||
POLYGON_STATE_TIMER_FILE="polygon-state.timer"
|
||||
POLYGON_STATE_CLEAN_SERVICE_FILE="polygon-state-clean.service"
|
||||
POLYGON_STATE_CLEAN_TIMER_FILE="polygon-state-clean.timer"
|
||||
POLYGON_METADATA_SERVICE_FILE="polygon-metadata.service"
|
||||
POLYGON_METADATA_TIMER_FILE="polygon-metadata.timer"
|
||||
|
||||
|
@ -226,6 +228,15 @@ cp "${SCRIPT_DIR}/${POLYGON_STATE_TIMER_FILE}" "/etc/systemd/system/${POLYGON_ST
|
|||
systemctl daemon-reload
|
||||
systemctl restart --no-block "${POLYGON_STATE_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Polygon state clean service and timer with: ${POLYGON_STATE_CLEAN_SERVICE_FILE}, ${POLYGON_STATE_CLEAN_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${POLYGON_STATE_CLEAN_SERVICE_FILE}" "${SCRIPT_DIR}/${POLYGON_STATE_CLEAN_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${POLYGON_STATE_CLEAN_SERVICE_FILE}" "/etc/systemd/system/${POLYGON_STATE_CLEAN_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${POLYGON_STATE_CLEAN_TIMER_FILE}" "/etc/systemd/system/${POLYGON_STATE_CLEAN_TIMER_FILE}"
|
||||
systemctl daemon-reload
|
||||
systemctl restart --no-block "${POLYGON_STATE_CLEAN_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Polygon metadata service and timer with: ${POLYGON_METADATA_SERVICE_FILE}, ${POLYGON_METADATA_TIMER_FILE}"
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
[Unit]
# One-shot job that prunes stale state-crawler labels for Polygon.
# Scheduled by the matching polygon-state-clean.timer unit.
Description=Execute state clean labels crawler
After=network.target

[Service]
Type=oneshot
User=ubuntu
Group=www-data
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
# Secrets (incl. NB_CONTROLLER_ACCESS_ID used below) come from this env file.
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
# -N 10000: delete state labels older than 10000 blocks behind the latest block.
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli --access-id "${NB_CONTROLLER_ACCESS_ID}" clean-state-labels --blockchain polygon -N 10000
CPUWeight=60
SyslogIdentifier=polygon-state-clean
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
Description=Execute Polygon state clean labels crawler each 25m

[Timer]
# First activation 10 seconds after boot, then 25 minutes after each
# previous activation of the service.
OnBootSec=10s
OnUnitActiveSec=25m

[Install]
WantedBy=timers.target
|
|
@ -15,7 +15,7 @@ from moonstreamdb.db import (
|
|||
)
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
|
||||
from .db import view_call_to_label, commit_session
|
||||
from .db import view_call_to_label, commit_session, clean_labels
|
||||
from .Multicall2_interface import Contract as Multicall2
|
||||
from ..settings import (
|
||||
NB_CONTROLLER_ACCESS_ID,
|
||||
|
@ -395,6 +395,33 @@ def parse_abi(args: argparse.Namespace) -> None:
|
|||
json.dump(output_json, f)
|
||||
|
||||
|
||||
def clean_labels_handler(args: argparse.Namespace) -> None:
    """
    CLI handler for the ``clean-state-labels`` subcommand.

    Connects to the blockchain named by ``args.blockchain``, reads the
    latest block number, then deletes state-crawler labels older than
    ``args.blocks_cutoff`` blocks via :func:`clean_labels`.
    """
    chain = AvailableBlockchainType(args.blockchain)

    web3_client = _retry_connect_web3(
        blockchain_type=chain, access_id=args.access_id
    )

    logger.info(f"Label cleaner connected to blockchain: {chain}")

    latest_block = web3_client.eth.get_block("latest").number  # type: ignore

    engine = create_moonstream_engine(
        MOONSTREAM_DB_URI,
        pool_pre_ping=True,
        pool_size=MOONSTREAM_POOL_SIZE,
        statement_timeout=MOONSTREAM_STATE_CRAWLER_DB_STATEMENT_TIMEOUT_MILLIS,
    )
    session_factory = sessionmaker(bind=engine)
    session = session_factory()

    try:
        clean_labels(session, chain, args.blocks_cutoff, latest_block)
    finally:
        # Always release the DB connection, even if cleaning fails.
        session.close()
|
||||
|
||||
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.set_defaults(func=lambda _: parser.print_help())
|
||||
|
@ -431,6 +458,26 @@ def main() -> None:
|
|||
)
|
||||
view_state_crawler_parser.set_defaults(func=handle_crawl)
|
||||
|
||||
view_state_cleaner = subparsers.add_parser(
|
||||
"clean-state-labels",
|
||||
help="Clean labels from database",
|
||||
)
|
||||
view_state_cleaner.add_argument(
|
||||
"--blockchain",
|
||||
"-b",
|
||||
type=str,
|
||||
help="Type of blovkchain wich writng in database",
|
||||
required=True,
|
||||
)
|
||||
view_state_cleaner.add_argument(
|
||||
"--blocks-cutoff",
|
||||
"-N",
|
||||
required=True,
|
||||
type=int,
|
||||
help="Amount blocks back, after wich data will be remove.",
|
||||
)
|
||||
view_state_cleaner.set_defaults(func=clean_labels_handler)
|
||||
|
||||
generate_view_parser = subparsers.add_parser(
|
||||
"parse-abi",
|
||||
help="Parse view methods from the abi file.",
|
||||
|
|
|
@ -59,3 +59,35 @@ def commit_session(db_session: Session) -> None:
|
|||
logger.error(f"Failed to save labels: {e}")
|
||||
db_session.rollback()
|
||||
raise e
|
||||
|
||||
|
||||
def clean_labels(
    db_session: Session,
    blockchain_type: AvailableBlockchainType,
    blocks_cutoff: int,
    block_number: int,
) -> None:
    """
    Delete stale state-crawler labels from the blockchain's label table.

    Removes rows where ``label == VIEW_STATE_CRAWLER_LABEL`` and
    ``block_number < block_number - blocks_cutoff`` (i.e. labels older
    than ``blocks_cutoff`` blocks behind the given head block).

    :param db_session: open SQLAlchemy session; committed on success,
        rolled back on failure.
    :param blockchain_type: selects which chain's label table to clean.
    :param blocks_cutoff: how many blocks behind ``block_number`` to keep.
    :param block_number: current head block number of the chain.
    :raises Exception: re-raises any database error after rollback.
    """

    label_model = get_label_model(blockchain_type)

    table = label_model.__tablename__
    # Use the module logger (not print) so output is consistent with the
    # rest of this module and reaches the configured handlers/syslog.
    logger.info(f"Cleaning labels from table {table}")
    logger.info(f"Current block number: {block_number} - blocks cutoff: {blocks_cutoff}")
    logger.info(f"Deleting labels with block_number < {block_number - blocks_cutoff}")

    try:
        logger.info("Removing labels from database")
        query = db_session.query(label_model).filter(
            label_model.label == VIEW_STATE_CRAWLER_LABEL,
            label_model.block_number < block_number - blocks_cutoff,
        )
        # synchronize_session=False: bulk DELETE without reconciling the
        # in-memory session state; safe here because the session is closed
        # by the caller right after this call.
        result = query.delete(synchronize_session=False)
        db_session.commit()
        logger.info(f"Removed {result} rows from {table}")
    except Exception as e:
        logger.error(f"Failed to remove labels: {e}")
        db_session.rollback()
        # Bare raise preserves the original traceback.
        raise
|
||||
|
|
Ładowanie…
Reference in New Issue