Formatting: Apply consistent formatting with black
martomi committed Apr 4, 2021
1 parent 9f95801 commit 70cc4c3
Showing 16 changed files with 144 additions and 157 deletions.
27 changes: 11 additions & 16 deletions main.py
@@ -13,9 +13,10 @@


def parse_arguments():
-    parser = argparse.ArgumentParser(description='ChiaFarmWatch: Watch your crops '
-                                                 'with a piece in mind for the yield.')
-    parser.add_argument('--config', required=True, type=str, help='path to config.yaml')
+    parser = argparse.ArgumentParser(
+        description="ChiaFarmWatch: Watch your crops " "with a piece in mind for the yield."
+    )
+    parser.add_argument("--config", required=True, type=str, help="path to config.yaml")
return parser.parse_args()


@@ -35,14 +36,16 @@ def get_log_level(log_level: str) -> int:
return logging.INFO


-if __name__ == '__main__':
+if __name__ == "__main__":
# Parse config and configure logger
args = parse_arguments()
config = Config(Path(args.config))
log_level = get_log_level(config.get_log_level_config())
logging.basicConfig(
-        format='[%(asctime)s] [%(levelname)8s] --- %(message)s (%(filename)s:%(lineno)s)',
-        level=log_level, datefmt='%Y-%m-%d %H:%M:%S')
+        format="[%(asctime)s] [%(levelname)8s] --- %(message)s (%(filename)s:%(lineno)s)",
+        level=log_level,
+        datefmt="%Y-%m-%d %H:%M:%S",
+    )

# Using file log consumer by default TODO: make configurable
chia_logs_config = config.get_chia_logs_config()
@@ -56,18 +59,11 @@ def get_log_level(log_level: str) -> int:
keep_alive_monitor = KeepAliveMonitor()

# Notify manager is responsible for the lifecycle of all notifiers
-    notify_manager = NotifyManager(
-        config=config.get_notifier_config(),
-        keep_alive_monitor=keep_alive_monitor
-    )
+    notify_manager = NotifyManager(config=config.get_notifier_config(), keep_alive_monitor=keep_alive_monitor)

# Link stuff up in the log handler
# Pipeline: Consume -> Handle -> Notify
-    log_handler = LogHandler(
-        log_consumer=log_consumer,
-        notify_manager=notify_manager
-    )
-
+    log_handler = LogHandler(log_consumer=log_consumer, notify_manager=notify_manager)

def interrupt(signal_number, frame):
if signal_number == signal.SIGINT:
@@ -76,6 +72,5 @@ def interrupt(signal_number, frame):
keep_alive_monitor.stop()
exit(0)


signal.signal(signal.SIGINT, interrupt)
signal.pause()
2 changes: 2 additions & 0 deletions pyproject.toml
@@ -0,0 +1,2 @@
+[tool.black]
+line-length = 120
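
With this [tool.black] table in place, black picks up its settings automatically whenever it is run from the repository root, so every contributor formats to the same 120-character line length. A typical invocation (assuming black is installed in the environment; the exact commands are not part of this commit) would be:

    pip install black
    black .           # reformat all Python sources in place using line-length = 120
    black --check .   # verify formatting without modifying files, e.g. in CI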
3 changes: 1 addition & 2 deletions src/chia_log/handlers/__init__.py
@@ -15,8 +15,7 @@ class that analyses a specific part of the log


class LogHandler(ABC):
"""Common interface for log handlers
"""
"""Common interface for log handlers"""

@abstractmethod
def handle(self, logs: str) -> List[Event]:
44 changes: 14 additions & 30 deletions src/chia_log/handlers/harvester_activity_handler.py
@@ -17,12 +17,7 @@ class HarvesterActivityHandler(LogHandler):

def __init__(self):
self._parser = HarvesterActivityParser()
-        self._cond_checkers = [
-            TimeSinceLastFarmEvent(),
-            NonDecreasingPlots(),
-            QuickPlotSearchTime(),
-            FoundProofs()
-        ]
+        self._cond_checkers = [TimeSinceLastFarmEvent(), NonDecreasingPlots(), QuickPlotSearchTime(), FoundProofs()]

def handle(self, logs: str) -> List[Event]:
"""Process incoming logs, check all conditions
@@ -36,12 +31,11 @@ def handle(self, logs: str) -> List[Event]:
# activity have been successfully parsed
if len(activity_messages) > 0:
logging.debug(f"Parsed {len(activity_messages)} activity messages")
-            events.append(Event(
-                type=EventType.KEEPALIVE,
-                priority=EventPriority.NORMAL,
-                service=EventService.HARVESTER,
-                message=""
-            ))
+            events.append(
+                Event(
+                    type=EventType.KEEPALIVE, priority=EventPriority.NORMAL, service=EventService.HARVESTER, message=""
+                )
+            )

# Run messages through all condition checkers
for msg in activity_messages:
@@ -76,14 +70,13 @@ def check(self, obj: HarvesterActivityMessage) -> Optional[Event]:
seconds_since_last = (obj.timestamp - self._last_timestamp).seconds

if seconds_since_last > self._warning_threshold:
message = f"Harvester did not participate in any challenge for {seconds_since_last} seconds. " \
f"This might indicate networking issues. It's now working again."
message = (
f"Harvester did not participate in any challenge for {seconds_since_last} seconds. "
f"This might indicate networking issues. It's now working again."
)
logging.warning(message)
event = Event(
-                type=EventType.USER,
-                priority=EventPriority.NORMAL,
-                service=EventService.HARVESTER,
-                message=message
+                type=EventType.USER, priority=EventPriority.NORMAL, service=EventService.HARVESTER, message=message
)

self._last_timestamp = obj.timestamp
@@ -109,10 +102,7 @@ def check(self, obj: HarvesterActivityMessage) -> Optional[Event]:
message = f"The total plot count decreased from {self._max_farmed_plots} to {obj.total_plots_count}."
logging.warning(message)
event = Event(
-                type=EventType.USER,
-                priority=EventPriority.HIGH,
-                service=EventService.HARVESTER,
-                message=message
+                type=EventType.USER, priority=EventPriority.HIGH, service=EventService.HARVESTER, message=message
)

# Update max plots to prevent repeated alarms
@@ -135,10 +125,7 @@ def check(self, obj: HarvesterActivityMessage) -> Optional[Event]:
message = f"Seeking plots took too long: {obj.search_time_seconds} seconds!"
logging.warning(message)
return Event(
-                type=EventType.USER,
-                priority=EventPriority.NORMAL,
-                service=EventService.HARVESTER,
-                message=message
+                type=EventType.USER, priority=EventPriority.NORMAL, service=EventService.HARVESTER, message=message
)

return None
@@ -152,10 +139,7 @@ def check(self, obj: HarvesterActivityMessage) -> Optional[Event]:
message = f"Found {obj.found_proofs_count} proofs!"
logging.info(message)
return Event(
-                type=EventType.USER,
-                priority=EventPriority.LOW,
-                service=EventService.HARVESTER,
-                message=message
+                type=EventType.USER, priority=EventPriority.LOW, service=EventService.HARVESTER, message=message
)

return None
3 changes: 1 addition & 2 deletions src/chia_log/log_consumer.py
@@ -59,8 +59,7 @@ def stop(self):
def _consume_loop(self):
expanded_user_log_path = self._log_path.expanduser()
logging.info(f"Consuming log file from {expanded_user_log_path}")
-        f = subprocess.Popen(['tail', '-F', expanded_user_log_path],
-                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        f = subprocess.Popen(["tail", "-F", expanded_user_log_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
while self._is_running:
log_line = f.stdout.readline().decode(encoding="utf-8")
self._notify_subscribers(log_line)
4 changes: 1 addition & 3 deletions src/chia_log/log_handler.py
@@ -21,9 +21,7 @@ class LogHandler(LogConsumerSubscriber):
def __init__(self, log_consumer: LogConsumer, notify_manager: NotifyManager):
log_consumer.subscribe(self)
self._notify_manager = notify_manager
-        self._handlers = [
-            HarvesterActivityHandler()
-        ]
+        self._handlers = [HarvesterActivityHandler()]

def consume_logs(self, logs: str):
for handler in self._handlers:
27 changes: 16 additions & 11 deletions src/chia_log/parsers/harvester_activity_parser.py
@@ -11,6 +11,7 @@
@dataclass
class HarvesterActivityMessage:
"""Parsed information from harvester logs"""
+
timestamp: datetime
eligible_plots_count: int
challenge_hash: str
@@ -27,9 +28,11 @@ class HarvesterActivityParser:
"""

def __init__(self):
-        self._regex = re.compile(r'([0-9:.]*) harvester src.harvester.harvester : INFO\s*([0-9]) plots were '
-                                 r'eligible for farming ([0-9a-z.]*) Found ([0-9]) proofs. Time: ([0-9.]*) s. '
-                                 r'Total ([0-9]*) plots')
+        self._regex = re.compile(
+            r"([0-9:.]*) harvester src.harvester.harvester : INFO\s*([0-9]) plots were "
+            r"eligible for farming ([0-9a-z.]*) Found ([0-9]) proofs. Time: ([0-9.]*) s. "
+            r"Total ([0-9]*) plots"
+        )

def parse(self, logs: str) -> List[HarvesterActivityMessage]:
"""Parses all harvester activity messages from a bunch of logs
@@ -41,13 +44,15 @@ def parse(self, logs: str) -> List[HarvesterActivityMessage]:
parsed_messages = []
matches = self._regex.findall(logs)
for match in matches:
-            parsed_messages.append(HarvesterActivityMessage(
-                timestamp=dateutil_parser.parse(match[0]),
-                eligible_plots_count=int(match[1]),
-                challenge_hash=match[2],
-                found_proofs_count=int(match[3]),
-                search_time_seconds=float(match[4]),
-                total_plots_count=int(match[5])
-            ))
+            parsed_messages.append(
+                HarvesterActivityMessage(
+                    timestamp=dateutil_parser.parse(match[0]),
+                    eligible_plots_count=int(match[1]),
+                    challenge_hash=match[2],
+                    found_proofs_count=int(match[3]),
+                    search_time_seconds=float(match[4]),
+                    total_plots_count=int(match[5]),
+                )
+            )

return parsed_messages
2 changes: 1 addition & 1 deletion src/config.py
@@ -10,7 +10,7 @@ def __init__(self, config_path: Path):
if not config_path.is_file():
raise ValueError(f"Invalid config.yaml path: {config_path}")

-        with open(config_path, 'r') as config_file:
+        with open(config_path, "r") as config_file:
self._config = yaml.safe_load(config_file)

def _get_child_config(self, key):
32 changes: 16 additions & 16 deletions src/notifier/keep_alive_monitor.py
@@ -21,25 +21,23 @@ class KeepAliveMonitor:
def __init__(self, thresholds: dict = None):
self._notify_manager = None

-        self._last_keep_alive = {
-            EventService.HARVESTER: datetime.now()
-        }
-        self._last_keep_alive_threshold_seconds = thresholds or {
-            EventService.HARVESTER: 300
-        }
+        self._last_keep_alive = {EventService.HARVESTER: datetime.now()}
+        self._last_keep_alive_threshold_seconds = thresholds or {EventService.HARVESTER: 300}

# Infer check period from minimum threshold (arbitrary decision)
# Note that this period defines how often high priority notifications
# will be re-triggered so < 5 min is not recommended
-        self._check_period = float('inf')
+        self._check_period = float("inf")
for threshold in self._last_keep_alive_threshold_seconds.values():
self._check_period = min(threshold, self._check_period)

logging.info(f"Keep-alive check period: {self._check_period} seconds")
if self._check_period < 300:
logging.warning("Check period below 5 minutes might result "
"in very frequent high priority notifications "
"in case something stops working. Is it intended?")
logging.warning(
"Check period below 5 minutes might result "
"in very frequent high priority notifications "
"in case something stops working. Is it intended?"
)

# Start thread
self._is_running = True
@@ -67,12 +65,14 @@ def check_last_keep_alive(self):
if seconds_since_last > self._last_keep_alive_threshold_seconds[service]:
message = f"No keep-alive events from harvester for the past {seconds_since_last} seconds"
logging.warning(message)
-                events.append(Event(
-                    type=EventType.USER,
-                    priority=EventPriority.HIGH,
-                    service=EventService.HARVESTER,
-                    message=message
-                ))
+                events.append(
+                    Event(
+                        type=EventType.USER,
+                        priority=EventPriority.HIGH,
+                        service=EventService.HARVESTER,
+                        message=message,
+                    )
+                )
if len(events):
if self._notify_manager:
self._notify_manager.process_events(events)
4 changes: 1 addition & 3 deletions src/notifier/notify_manager.py
@@ -22,9 +22,7 @@ def __init__(self, config: dict, keep_alive_monitor: KeepAliveMonitor = None):
self._initialize_notifiers()

def _initialize_notifiers(self):
-        key_notifier_mapping = {
-            "pushover": PushoverNotifier
-        }
+        key_notifier_mapping = {"pushover": PushoverNotifier}
for key in self._config.keys():
if key not in key_notifier_mapping.keys():
logging.warning(f"Cannot find mapping for {key} notifier.")
26 changes: 16 additions & 10 deletions src/notifier/pushover_notifier.py
@@ -13,8 +13,8 @@ def __init__(self, config: dict):
logging.info("Initializing Pushover notifier.")
super().__init__()
try:
-            self.token = config['api_token']
-            self.user = config['user_key']
+            self.token = config["api_token"]
+            self.user = config["user_key"]
except KeyError as key:
logging.error(f"Invalid config.yaml. Missing key: {key}")

@@ -23,14 +23,20 @@ def send_events_to_user(self, events: List[Event]) -> bool:
for event in events:
if event.type == EventType.USER:
conn = http.client.HTTPSConnection("api.pushover.net:443")
conn.request("POST", "/1/messages.json",
urllib.parse.urlencode({
"token": self.token,
"user": self.user,
"title": f"Chia {event.service.name}",
"message": event.message,
"priority": event.priority.value
}), {"Content-type": "application/x-www-form-urlencoded"})
conn.request(
"POST",
"/1/messages.json",
urllib.parse.urlencode(
{
"token": self.token,
"user": self.user,
"title": f"Chia {event.service.name}",
"message": event.message,
"priority": event.priority.value,
}
),
{"Content-type": "application/x-www-form-urlencoded"},
)
response = conn.getresponse()
if response.getcode() != 200:
logging.warning(f"Problem sending event to user, code: {response.getcode()}")