
Commit

fix: adjust emojis a bit
aimxhaisse committed Jun 25, 2024
1 parent 98c316d commit 40537f7
Showing 4 changed files with 14 additions and 14 deletions.
4 changes: 2 additions & 2 deletions eth_validator_watcher/clock.py
@@ -20,7 +20,7 @@ def __init__(self, genesis: int, slot_duration: int, slots_per_epoch: int, start
         self._start_at = start_at
 
         if start_at:
-            logging.info(f'⏲️ Starting clock at timestamp @ {start_at} ⏲️')
+            logging.info(f'⏲️ Starting clock at timestamp @ {start_at}')
 
     def now(self) -> float:
         """Get the current time in seconds since the epoch.
@@ -76,5 +76,5 @@ def maybe_wait_for_slot(self, slot: int) -> None:
         target = self._genesis + slot * self._slot_duration + self._lag_seconds
         now = self.now()
         if now < target:
-            logging.info(f'⏳Waiting {target - now:.2f} seconds for slot {slot}')
+            logging.info(f'⏳Waiting {target - now:.2f} seconds for slot {slot}')
             time.sleep(target - now)
8 changes: 4 additions & 4 deletions eth_validator_watcher/config.py
@@ -53,7 +53,7 @@ def load_config(config_file: str) -> Config:
         The effective configuration used by the watcher
     """
     with open(config_file, 'r') as fh:
-        logging.info(f'⚙️ Parsing configuration file {config_file} ⚙️')
+        logging.info(f'⚙️ Parsing configuration file {config_file}')
 
         # We support json for large configuration files (500 MiB)
         # which can take time to parse with PyYAML.
@@ -62,19 +62,19 @@ def load_config(config_file: str) -> Config:
         else:
            config = yaml.load(fh, Loader=yaml.CLoader) or dict()
 
-        logging.info(f'⚙️ Validating configuration file ⚙️')
+        logging.info(f'⚙️ Validating configuration file')
         from_default = _default_config().model_dump()
         from_env = Config().model_dump()
         from_file = Config(**config).model_dump()
 
-        logging.info(f'⚙️ Merging with environment variables ⚙️')
+        logging.info(f'⚙️ Merging with environment variables')
         merged = from_default.copy()
 
         merged.update({k: v for k, v in from_file.items() if v})
         merged.update({k: v for k, v in from_env.items() if v})
 
         r = Config(**merged)
 
-        logging.info(f'⚙️ Configuration file is ready ⚙️')
+        logging.info(f'⚙️ Configuration file is ready')
 
         return r
14 changes: 7 additions & 7 deletions eth_validator_watcher/entrypoint.py
@@ -149,20 +149,20 @@ def run(self) -> None:
         last_processed_finalized_slot = None
 
         while True:
-            logging.info(f'🧮 Processing slot {slot} 🧮')
+            logging.info(f'🔨 Processing slot {slot}')
 
             last_finalized_slot = self._beacon.get_header(BlockIdentierType.FINALIZED).data.header.message.slot
             self._schedule.update(self._beacon, slot, last_processed_finalized_slot, last_finalized_slot)
 
             if beacon_validators == None or (slot % self._spec.data.SLOTS_PER_EPOCH == 0):
-                logging.info(f'🧮 Processing epoch {epoch} 🧮')
+                logging.info(f'🔨 Processing epoch {epoch}')
                 beacon_validators = self._beacon.get_validators(self._clock.epoch_to_slot(epoch))
                 watched_validators.process_epoch(beacon_validators)
                 if not watched_validators.config_initialized:
                     watched_validators.process_config(self._cfg)
 
             if validators_liveness == None or (slot % self._spec.data.SLOTS_PER_EPOCH == SLOT_FOR_MISSED_ATTESTATIONS_PROCESS):
-                logging.info('🧮 Processing validator liveness 🧮')
+                logging.info('🔨 Processing validator liveness')
                 validators_liveness = self._beacon.get_validators_liveness(epoch - 1, watched_validators.get_indexes())
                 watched_validators.process_liveness(validators_liveness)
 
@@ -174,25 +174,25 @@ def run(self) -> None:
             if not has_block:
                 rewards = None
             else:
-                logging.info('🧮 Trying to process rewards 🧮')
+                logging.info('🔨 Trying to process rewards')
                 rewards = self._beacon.get_rewards(epoch - 2)
                 process_rewards(watched_validators, rewards)
 
             process_block(watched_validators, self._schedule, slot, has_block)
             process_future_blocks(watched_validators, self._schedule, slot)
 
             while last_processed_finalized_slot and last_processed_finalized_slot < last_finalized_slot:
-                logging.info(f'🧮 Processing finalized slot from {last_processed_finalized_slot or last_finalized_slot} to {last_finalized_slot} 🧮')
+                logging.info(f'🔨 Processing finalized slot from {last_processed_finalized_slot or last_finalized_slot} to {last_finalized_slot}')
                 has_block = self._beacon.has_block_at_slot(last_processed_finalized_slot)
                 process_finalized_block(watched_validators, self._schedule, last_processed_finalized_slot, has_block)
                 last_processed_finalized_slot += 1
             last_processed_finalized_slot = last_finalized_slot
 
-            logging.info('🧮 Updating Prometheus metrics 🧮')
+            logging.info('🔨 Updating Prometheus metrics')
             self._update_metrics(watched_validators, epoch, slot)
 
             if (slot % self._spec.data.SLOTS_PER_EPOCH == SLOT_FOR_CONFIG_RELOAD):
-                logging.info('🧮 Processing configuration update 🧮')
+                logging.info('🔨 Processing configuration update')
                 self._reload_config()
                 watched_validators.process_config(self._cfg)
 
2 changes: 1 addition & 1 deletion eth_validator_watcher/metrics.py
@@ -54,7 +54,7 @@ def compute_validator_metrics(validators: dict[int, WatchedValidator], slot: int
     Returns:
         dict[str, MetricsByLabel]
     """
-    logging.info(f"📊 Computing metrics for {len(validators)} validators 📊")
+    logging.info(f"📊 Computing metrics for {len(validators)} validators")
     metrics = fast_compute_validator_metrics(validators)
 
     for _, v in validators.items():
