From 526135629c5e209a09d63b6945d040cd4ed3ad6d Mon Sep 17 00:00:00 2001
From: sadnub
Date: Thu, 11 Feb 2021 17:05:36 -0500
Subject: [PATCH] fix some typos and implement runscript and runscriptfull on
 agent function

---
 api/tacticalrmm/agents/models.py    | 60 ++++++++++++++++++++---------
 api/tacticalrmm/autotasks/models.py | 54 +++++++++++++++++++++-----
 api/tacticalrmm/checks/models.py    | 54 +++++++++++++++++---------
 3 files changed, 122 insertions(+), 46 deletions(-)

diff --git a/api/tacticalrmm/agents/models.py b/api/tacticalrmm/agents/models.py
index cac00346..3aff49ef 100644
--- a/api/tacticalrmm/agents/models.py
+++ b/api/tacticalrmm/agents/models.py
@@ -8,8 +8,7 @@
 import validators
 import msgpack
 import re
 from collections import Counter
-from typing import List
-from typing import Union
+from typing import List, Union, Any
 from loguru import logger
 import asyncio
@@ -277,11 +276,12 @@
         script: Script,
         args: List[str] = [],
         timeout: int = 120,
+        full: bool = False,
         wait: bool = False,
         run_on_any=False,
-    ):
+    ) -> Any:
         data = {
-            "func": "runscript",
+            "func": "runscriptfull" if full else "runscript",
            "timeout": timeout,
            "script_args": args,
            "payload": {
@@ -289,9 +289,10 @@
                 "shell": script.shell,
             },
         }
 
+        running_agent = self
         if run_on_any:
-            nats_ping = {"func": "ping", "timeout": 2}
+            nats_ping = {"func": "ping", "timeout": 1}
 
             # try on self first
             r = asyncio.run(self.nats_cmd(nats_ping))
@@ -306,17 +307,23 @@
                     )
                     if agent.status == "online"
                 ]
+
                 for agent in online:
                     r = asyncio.run(agent.nats_cmd(nats_ping))
                     if r == "pong":
                         running_agent = agent
                         break
 
+            if running_agent.pk == self.pk:
+                return "Unable to find an online agent"
+
         if wait:
-            return asyncio.run(running_agent.nats_cmd(data, timeout=timeout))
+            return asyncio.run(running_agent.nats_cmd(data, timeout=timeout, wait=True))
         else:
             asyncio.run(running_agent.nats_cmd(data, wait=False))
 
+        return "ok"
+
     # auto approves updates
     def approve_updates(self):
         patch_policy = self.get_patch_policy()
@@ -732,16 +739,24 @@
                     alert_template.resolved_action_args,
                     timeout=15,
                     wait=True,
+                    full=True,
                     run_on_any=True,
                 )
 
-                alert.resolved_action_retcode = r["retcode"]
-                alert.resolved_action_stdout = r["stderr"]
-                alert.resolved_action_stderr = r["stderr"]
-                alert.resolved_action_execution_time = "{:.4f}".format(
-                    r["execution_time"]
-                )
-                alert.save()
+                # command was successful
+                if type(r) == dict:
+                    alert.resolved_action_retcode = r["retcode"]
+                    alert.resolved_action_stdout = r["stdout"]
+                    alert.resolved_action_stderr = r["stderr"]
+                    alert.resolved_action_execution_time = "{:.4f}".format(
+                        r["execution_time"]
+                    )
+                    alert.resolved_action_run = True
+                    alert.save()
+                else:
+                    logger.error(
+                        f"Resolved action: {alert_template.resolved_action.name} failed to run on any agent for {self.hostname} resolved outage"
+                    )
 
         # called when agent is offline
         else:
@@ -795,20 +810,27 @@
 
             # check if any scripts should be run
             if not alert.action_run and alert_template and alert_template.action:
-                # attempt to run on agent, but probably won't work since it is offline
                 r = self.run_script(
                     alert_template.action,
                     alert_template.action_args,
                     timeout=15,
                     wait=True,
+                    full=True,
                     run_on_any=True,
                 )
 
-                alert.action_retcode = r["retcode"]
-                alert.action_stdout = r["stderr"]
-                alert.action_stderr = r["stderr"]
-                alert.action_execution_time = "{:.4f}".format(r["execution_time"])
-                alert.save()
+                # command was successful
+                if type(r) == dict:
+                    alert.action_retcode = r["retcode"]
+                    alert.action_stdout = r["stdout"]
+                    alert.action_stderr = r["stderr"]
+                    alert.action_execution_time = "{:.4f}".format(r["execution_time"])
+                    alert.action_run = True
+                    alert.save()
+                else:
+                    logger.error(
+                        f"Failure action: {alert_template.action.name} failed to run on any agent for {self.hostname} outage"
+                    )
 
     def send_outage_email(self):
         from core.models import CoreSettings
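
Note on the new run_script contract: with wait=True the method returns whatever
nats_cmd returns, and the alert-handling hunks treat a dict as success and
anything else as an error string. A minimal caller sketch follows; the reply
keys retcode/stdout/stderr/execution_time are assumed from how the alert
handlers consume them rather than from a documented schema, and the script/args
values are invented for illustration:

    # hypothetical usage, not part of this diff
    r = agent.run_script(
        script,              # a scripts.models.Script instance
        args=["-Verbose"],   # made-up example args
        timeout=30,
        full=True,           # sends "runscriptfull" to get the full reply dict
        wait=True,           # block for the reply instead of fire-and-forget
        run_on_any=True,     # fail over to another online agent if self is down
    )
    if type(r) == dict:
        # the script ran somewhere and full output came back
        print(r["retcode"], r["stdout"], r["stderr"], r["execution_time"])
    else:
        # e.g. "Unable to find an online agent", or an error string from nats
        print(r)
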
diff --git a/api/tacticalrmm/autotasks/models.py b/api/tacticalrmm/autotasks/models.py
index 270c176f..028af7f0 100644
--- a/api/tacticalrmm/autotasks/models.py
+++ b/api/tacticalrmm/autotasks/models.py
@@ -3,15 +3,19 @@ import random
 import string
 import datetime as dt
 
+from django.conf import settings
 from django.db import models
 from django.contrib.postgres.fields import ArrayField
 from django.db.models.fields import DateTimeField
 from logs.models import BaseAuditModel
 from tacticalrmm.utils import bitdays_to_string
-from typing import Union
+
+from loguru import logger
 from alerts.models import SEVERITY_CHOICES
 
+logger.configure(**settings.LOG_CONFIG)
+
 RUN_TIME_DAY_CHOICES = [
     (0, "Monday"),
     (1, "Tuesday"),
@@ -273,16 +277,24 @@
                     alert_template.resolved_action_args,
                     timeout=15,
                     wait=True,
+                    full=True,
                     run_on_any=True,
                 )
 
-                alert.resolved_action_retcode = r["retcode"]
-                alert.resolved_action_stdout = r["stderr"]
-                alert.resolved_action_stderr = r["stderr"]
-                alert.resolved_action_execution_time = "{:.4f}".format(
-                    r["execution_time"]
-                )
-                alert.save()
+                # command was successful
+                if type(r) == dict:
+                    alert.resolved_action_retcode = r["retcode"]
+                    alert.resolved_action_stdout = r["stdout"]
+                    alert.resolved_action_stderr = r["stderr"]
+                    alert.resolved_action_execution_time = "{:.4f}".format(
+                        r["execution_time"]
+                    )
+                    alert.resolved_action_run = True
+                    alert.save()
+                else:
+                    logger.error(
+                        f"Resolved action: {alert_template.resolved_action.name} failed to run on any agent for {self.agent.hostname} resolved alert for task: {self.name}"
+                    )
 
         # create alert if task is failing
         else:
@@ -330,6 +342,30 @@
                 else None,
             )
 
+            # check if any scripts should be run
+            if alert_template and alert_template.action and not alert.action_run:
+                r = self.agent.run_script(
+                    alert_template.action,
+                    alert_template.action_args,
+                    timeout=15,
+                    wait=True,
+                    full=True,
+                    run_on_any=True,
+                )
+
+                # command was successful
+                if type(r) == dict:
+                    alert.action_retcode = r["retcode"]
+                    alert.action_stdout = r["stdout"]
+                    alert.action_stderr = r["stderr"]
+                    alert.action_execution_time = "{:.4f}".format(r["execution_time"])
+                    alert.action_run = True
+                    alert.save()
+                else:
+                    logger.error(
+                        f"Failure action: {alert_template.action.name} failed to run on any agent for {self.agent.hostname} failure alert for task: {self.name}"
+                    )
+
     def send_email(self):
         from core.models import CoreSettings
 
@@ -381,7 +417,7 @@
 
         CORE.send_mail(subject, body, alert_template=alert_template)
 
-    def send_resolved_text(self):
+    def send_resolved_sms(self):
         from core.models import CoreSettings
 
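
Note: the logger.error calls introduced above only land anywhere useful because
loguru is pointed at the project's sink via logger.configure(**settings.LOG_CONFIG)
at import time. LOG_CONFIG is a plain dict of loguru configure() keyword
arguments; its real value lives in the Django settings, so the shape below is
only an assumed illustration:

    # illustrative only; the actual sink and path come from tacticalrmm settings
    LOG_CONFIG = {
        "handlers": [
            {
                "sink": os.path.join(LOG_DIR, "debug.log"),  # LOG_DIR assumed
                "serialize": False,
                "enqueue": True,
            }
        ]
    }
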
diff --git a/api/tacticalrmm/checks/models.py b/api/tacticalrmm/checks/models.py
index 723c9fef..1d2444c3 100644
--- a/api/tacticalrmm/checks/models.py
+++ b/api/tacticalrmm/checks/models.py
@@ -3,16 +3,17 @@ import string
 import os
 import json
 import pytz
-from statistics import mean, mode
+from statistics import mean
 
 from django.db import models
 from django.conf import settings
 from django.contrib.postgres.fields import ArrayField
-from django.core.validators import MinValueValidator, MaxValueValidator
 from rest_framework.fields import JSONField
 from typing import List, Any
 from typing import Union
+from loguru import logger
+
 from core.models import CoreSettings
 from logs.models import BaseAuditModel
 from .tasks import (
@@ -24,6 +25,8 @@
 from .utils import bytes2human
 from alerts.models import SEVERITY_CHOICES
 
+logger.configure(**settings.LOG_CONFIG)
+
 CHECK_TYPE_CHOICES = [
     ("diskspace", "Disk Space Check"),
     ("ping", "Ping Check"),
@@ -258,7 +261,6 @@
 
     def handle_alert(self) -> None:
         from alerts.models import Alert, AlertTemplate
-        from scripts.models import Script
 
         # return if agent is in maintenance mode
         if self.agent.maintenance_mode:
@@ -279,7 +281,7 @@
                 and alert_template.check_email_on_resolved
                 and not alert.resolved_email_sent
             ):
-                handle_resolved_check_sms_alert_task.delay(pk=alert.pk)
+                handle_resolved_check_email_alert_task.delay(pk=alert.pk)
 
             # check if resolved text should be sent
             if (
@@ -300,16 +302,24 @@
                     alert_template.resolved_action_args,
                     timeout=15,
                     wait=True,
+                    full=True,
                     run_on_any=True,
                 )
 
-                alert.resolved_action_retcode = r["retcode"]
-                alert.resolved_action_stdout = r["stderr"]
-                alert.resolved_action_stderr = r["stderr"]
-                alert.resolved_action_execution_time = "{:.4f}".format(
-                    r["execution_time"]
-                )
-                alert.save()
+                # command was successful
+                if type(r) == dict:
+                    alert.resolved_action_retcode = r["retcode"]
+                    alert.resolved_action_stdout = r["stdout"]
+                    alert.resolved_action_stderr = r["stderr"]
+                    alert.resolved_action_execution_time = "{:.4f}".format(
+                        r["execution_time"]
+                    )
+                    alert.resolved_action_run = True
+                    alert.save()
+                else:
+                    logger.error(
+                        f"Resolved action: {alert_template.resolved_action.name} failed to run on any agent for {self.agent.hostname} resolved alert for {self.check_type} check"
+                    )
 
         elif self.fail_count >= self.fails_b4_alert:
             if not Alert.objects.filter(assigned_check=self, resolved=False).exists():
@@ -362,16 +372,24 @@
                 r = self.agent.run_script(
                     alert_template.action,
                     alert_template.action_args,
-                    timeout=15,
+                    timeout=30,
                     wait=True,
+                    full=True,
                     run_on_any=True,
                 )
 
-                alert.action_retcode = r["retcode"]
-                alert.action_stdout = r["stderr"]
-                alert.action_stderr = r["stderr"]
-                alert.action_execution_time = "{:.4f}".format(r["execution_time"])
-                alert.save()
+                # command was successful
+                if type(r) == dict:
+                    alert.action_retcode = r["retcode"]
+                    alert.action_stdout = r["stdout"]
+                    alert.action_stderr = r["stderr"]
+                    alert.action_execution_time = "{:.4f}".format(r["execution_time"])
+                    alert.action_run = True
+                    alert.save()
+                else:
+                    logger.error(
+                        f"Failure action: {alert_template.action.name} failed to run on any agent for {self.agent.hostname} failure alert for {self.check_type} check: {r}"
+                    )
 
     def add_check_history(self, value: int, more_info: Any = None) -> None:
         CheckHistory.objects.create(check_history=self, y=value, results=more_info)
@@ -853,7 +871,7 @@
 
         CORE.send_mail(subject, body, alert_template=alert_template)
 
-    def send_resolved_text(self):
+    def send_resolved_sms(self):
         CORE = CoreSettings.objects.first()
         alert_template = self.agent.get_alert_template()
         subject = f"{self.agent.client.name}, {self.agent.site.name}, {self} Resolved"
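
Note: all six record-or-log blocks in this patch share one shape: run_script
returns the decoded reply dict on success and a plain string otherwise, and the
alert fields differ only by prefix. A hypothetical consolidation, sketched only
to show the shared pattern (the helper name and signature are invented, not in
the patch):

    # hypothetical helper, not part of this diff
    def record_action_result(alert, r, prefix: str) -> bool:
        # run_script returns a str ("ok", "Unable to find an online agent",
        # or a nats error) when no full reply dict came back
        if type(r) != dict:
            return False
        setattr(alert, f"{prefix}_retcode", r["retcode"])
        setattr(alert, f"{prefix}_stdout", r["stdout"])
        setattr(alert, f"{prefix}_stderr", r["stderr"])
        setattr(alert, f"{prefix}_execution_time", "{:.4f}".format(r["execution_time"]))
        setattr(alert, f"{prefix}_run", True)
        alert.save()
        return True

    # prefix would be "action" or "resolved_action", matching the existing fields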