# --- fetchnotifications.py (new file in this patch) ---
import argparse
import os
import json
import library.migrationlogger as nrlogger
import library.clients.notificationsclient as notificationsclient
import library.localstore as store
import library.utils as utils

logger = nrlogger.get_logger(os.path.basename(__file__))
nc = notificationsclient.NotificationsClient()


def configure_parser():
    """Build the CLI parser: --account/--accounts select the account scope,
    --destinations/--channels select what to fetch."""
    parser = argparse.ArgumentParser(description='Fetch and store notifications')
    parser.add_argument('--account', nargs=1, type=str, required=False, help='Account ID')
    parser.add_argument('--accounts', nargs=1, type=str, required=False, help='Path to file with account IDs')
    parser.add_argument('--userApiKey', nargs=1, type=str, required=True, help='User API Key')
    parser.add_argument('--region', type=str, nargs=1, required=False, help='sourceRegion us(default) or eu')
    parser.add_argument('--destinations', dest='destinations', required=False, action='store_true', help='Query destinations')
    parser.add_argument('--channels', dest='channels', required=False, action='store_true', help='Query channels')
    return parser


def fetch_destinations(user_api_key, account_id, region, accounts_file=None):
    """Fetch notification destinations and persist them (per-account JSON plus
    an overall CSV written by get_config)."""
    destinations_by_id = get_config(nc.destinations, user_api_key, account_id, region, accounts_file)
    if account_id:
        # Fix: in --accounts mode account_id is None and the original call
        # crashed building the db/<account>/ path; only save per-account JSON
        # when a single account was requested.
        store.save_notification_destinations(account_id, destinations_by_id)
    return destinations_by_id


def fetch_channels(user_api_key, account_id, region, accounts_file=None):
    """Fetch notification channels and persist them (per-account JSON plus an
    overall CSV written by get_config)."""
    channels_by_id = get_config(nc.channels, user_api_key, account_id, region, accounts_file)
    if account_id:
        store.save_notification_channels(account_id, channels_by_id)
    return channels_by_id


def get_config(func, user_api_key, account_id, region, from_file):
    """Page through an aiNotifications query for one account (account_id) or
    for each ID listed in from_file, returning {entity_id: entity}.

    func is nc.destinations or nc.channels; its __name__ is both the response
    field under aiNotifications and the CSV base name.
    """
    acct_ids = [account_id] if account_id else store.load_names(from_file)
    configs_by_id = {}
    field = func.__name__  # 'destinations' or 'channels'
    for acct_id in acct_ids:
        cursor = None
        done = False
        while not done:
            try:
                response = nc.query(func, user_api_key, int(acct_id), region, cursor)
                logger.debug(json.dumps(response))
                # Fix: check the transport-level error *before* indexing into
                # the payload; the original indexed first, raised KeyError, and
                # its bare `except` turned that into an infinite retry loop.
                if 'error' in response:
                    logger.error(f'Could not fetch {field} for account {acct_id}')
                    logger.error(response['error'])
                    break
                result = response['response']['data']['actor']['account']['aiNotifications'][field]
                if result['error'] is not None:
                    # Fix: message used to say "destinations" even for channels.
                    logger.error(f'Could not fetch {field} for account {acct_id}')
                    logger.error(result['error'])
                    break
                config = result['entities']
                cursor = result['nextCursor']
                if cursor is None:
                    done = True
            except (KeyError, TypeError, ValueError) as exc:
                # Fix: was a bare `except:` that neither identified the failure
                # nor terminated the loop.
                logger.error(f'Error querying {field} for account {acct_id}: {exc}')
                break
            else:
                for element in config:
                    element['accountId'] = acct_id
                    configs_by_id.setdefault(element['id'], element)
    logger.info(configs_by_id)
    store.save_config_csv(field, configs_by_id)
    return configs_by_id


def main():
    parser = configure_parser()
    args = parser.parse_args()
    user_api_key = utils.ensure_user_api_key(args)
    if not user_api_key:
        utils.error_and_exit('userApiKey', 'ENV_USER_API_KEY')
    region = utils.ensure_region(args)
    # Fix: args.account is None when --account is omitted; the original
    # dereferenced args.account[0] unconditionally and crashed in
    # --accounts mode.
    account_id = args.account[0] if args.account else None
    accounts_file = args.accounts[0] if args.accounts else None
    if not (account_id or accounts_file):
        logger.error('pass --account or --accounts to select the account scope')
        return
    if args.destinations:
        fetch_destinations(user_api_key, account_id, region, accounts_file)
    elif args.channels:
        fetch_channels(user_api_key, account_id, region, accounts_file)
    else:
        logger.info("pass [--destinations | --channels] to fetch configuration")


if __name__ == '__main__':
    main()


# --- fetchworkflows.py (new file in this patch) ---
import argparse
import os
import json
import library.migrationlogger as nrlogger
# --- fetchworkflows.py (continued) ---
import library.clients.workflowsclient as workflowsclient
import library.localstore as store
import library.utils as utils

logger = nrlogger.get_logger(os.path.basename(__file__))
wc = workflowsclient.WorkflowsClient()


def configure_parser():
    """Build the CLI parser; --account/--accounts select the account scope."""
    parser = argparse.ArgumentParser(description='Fetch and store workflows')
    parser.add_argument('--account', nargs=1, type=str, required=False, help='Account ID')
    parser.add_argument('--accounts', nargs=1, type=str, required=False, help='Path to file with account IDs')
    parser.add_argument('--userApiKey', nargs=1, type=str, required=True, help='User API Key')
    parser.add_argument('--region', type=str, nargs=1, required=False, help='sourceRegion us(default) or eu')
    return parser


def fetch_workflows(user_api_key, account_id, region, accounts_file=None):
    """Fetch workflows and persist them (per-account JSON plus an overall CSV
    written by get_config)."""
    workflow_by_source_id = get_config(wc.workflows, user_api_key, account_id, region, accounts_file)
    if account_id:
        # Fix: in --accounts mode account_id is None and the original call
        # crashed building the db/<account>/ path.
        store.save_workflows(account_id, workflow_by_source_id)
    return workflow_by_source_id


def get_config(func, user_api_key, account_id, region, from_file):
    """Page through the aiWorkflows query for one account (account_id) or for
    each ID listed in from_file, returning {workflow_id: workflow}.

    Mirrors fetchnotifications.get_config; func is wc.workflows and its
    __name__ is both the response field and the CSV base name.
    """
    acct_ids = [account_id] if account_id else store.load_names(from_file)
    configs_by_id = {}
    field = func.__name__  # 'workflows'
    for acct_id in acct_ids:
        cursor = None
        done = False
        while not done:
            try:
                response = wc.query(func, user_api_key, int(acct_id), region, cursor)
                logger.debug(json.dumps(response))
                # Fix: check the transport-level error *before* indexing into
                # the payload; the original indexed first, raised KeyError, and
                # its bare `except` turned that into an infinite retry loop.
                if 'error' in response:
                    logger.error(f'Could not fetch workflows for account {acct_id}')
                    logger.error(response['error'])
                    break
                result = response['response']['data']['actor']['account']['aiWorkflows'][field]
                # Note: aiWorkflows has no per-field `error` attribute to check.
                config = result['entities']
                cursor = result['nextCursor']
                if cursor is None:
                    done = True
            except (KeyError, TypeError, ValueError) as exc:
                logger.error(f'Error querying {field} for account {acct_id}: {exc}')
                break
            else:
                for element in config:
                    element['accountId'] = acct_id
                    configs_by_id.setdefault(element['id'], element)
    logger.info(configs_by_id)
    store.save_config_csv(field, configs_by_id)
    return configs_by_id


def main():
    parser = configure_parser()
    args = parser.parse_args()
    user_api_key = utils.ensure_user_api_key(args)
    if not user_api_key:
        utils.error_and_exit('userApiKey', 'ENV_USER_API_KEY')
    region = utils.ensure_region(args)
    account_id = args.account[0] if args.account else None
    accounts_file = args.accounts[0] if args.accounts else None
    if not (account_id or accounts_file):
        logger.error('pass --account or --accounts to select the account scope')
        return
    # Fix: the original called
    #   fetch_workflows(user_api_key, args.account[0], args.accounts[0], region)
    # which passes the accounts file as `region` and the region as
    # `accounts_file` (signature is (key, account_id, region, accounts_file)),
    # and crashed whenever either flag was omitted.  Compare the correct call
    # in fetchnotifications.py.
    fetch_workflows(user_api_key, account_id, region, accounts_file)


if __name__ == '__main__':
    main()


# --- library/clients/notificationsclient.py (new file in this patch) ---
import json
import os
import library.utils as utils
import library.migrationlogger as nrlogger
import library.clients.gql as nerdgraph

# Destination types accepted by aiNotificationsCreateDestination.
DESTINATION_TYPE_EMAIL = 'EMAIL'
DESTINATION_TYPE_EVENT_BRIDGE = 'EVENT_BRIDGE'
DESTINATION_TYPE_JIRA = 'JIRA'
DESTINATION_TYPE_MOBILE_PUSH = 'MOBILE_PUSH'
DESTINATION_TYPE_PAGERDUTY_ACCOUNT_INTEGRATION = 'PAGERDUTY_ACCOUNT_INTEGRATION'
DESTINATION_TYPE_PAGERDUTY_SERVICE_INTEGRATION = 'PAGERDUTY_SERVICE_INTEGRATION'
DESTINATION_TYPE_SERVICE_NOW = 'SERVICE_NOW'
DESTINATION_TYPE_SLACK = 'SLACK'
DESTINATION_TYPE_SLACK_COLLABORATION = 'SLACK_COLLABORATION'
DESTINATION_TYPE_SLACK_LEGACY = 'SLACK_LEGACY'
DESTINATION_TYPE_WEBHOOK = 'WEBHOOK'

# Products a channel can belong to (AiNotificationsProduct enum).
CHANNEL_PRODUCT_ALERTS = 'ALERTS'
CHANNEL_PRODUCT_DISCUSSIONS = 'DISCUSSIONS'
CHANNEL_PRODUCT_ERROR_TRACKING = 'ERROR_TRACKING'
CHANNEL_PRODUCT_IINT = 'IINT'
CHANNEL_PRODUCT_NTFC = 'NTFC'
CHANNEL_PRODUCT_PD = 'PD'
CHANNEL_PRODUCT_SHARING = 'SHARING'

# Channel types accepted by aiNotificationsCreateChannel.
CHANNEL_TYPE_EMAIL = 'EMAIL'
CHANNEL_TYPE_EVENT_BRIDGE = 'EVENT_BRIDGE'
CHANNEL_TYPE_JIRA_CLASSIC = 'JIRA_CLASSIC'
CHANNEL_TYPE_JIRA_NEXTGEN = 'JIRA_NEXTGEN'
CHANNEL_TYPE_MOBILE_PUSH = 'MOBILE_PUSH'
CHANNEL_TYPE_PAGERDUTY_ACCOUNT_INTEGRATION = 'PAGERDUTY_ACCOUNT_INTEGRATION'
+CHANNEL_TYPE_PAGERDUTY_SERVICE_INTEGRATION = 'PAGERDUTY_SERVICE_INTEGRATION' +CHANNEL_TYPE_SERVICENOW_EVENTS = 'SERVICENOW_EVENTS' +CHANNEL_TYPE_SERVICENOW_INCIDENTS = 'SERVICENOW_INCIDENTS' +CHANNEL_TYPE_SLACK = 'SLACK' +CHANNEL_TYPE_SLACK_COLLABORATION = 'SLACK_COLLABORATION' +CHANNEL_TYPE_SLACK_LEGACY = 'SLACK_LEGACY' +CHANNEL_TYPE_WEBHOOK = 'WEBHOOK' + +logger = nrlogger.get_logger(os.path.basename(__file__)) + + +class NotificationsClient: + + def __init__(self): + pass + + + @staticmethod + def query(func, user_api_key, account_id, region, cursor): + payload = func(account_id, cursor) + logger.debug(json.dumps(payload)) + return nerdgraph.GraphQl.post(user_api_key, payload, region) + + + @staticmethod + def create_email_destination(destination, user_api_key, account_id, region): + logger.info(f"Destination {destination['name']} creation started.") + payload = NotificationsClient.email_destination_payload(account_id, destination) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if result['response']['data']['aiNotificationsCreateDestination']['error'] is not None: + logger.error(f"Error : {result['response']['data']['aiNotificationsCreateDestination']['error']}") + else: + destination_id = result['response']['data']['aiNotificationsCreateDestination']['destination']['id'] + destination.setdefault('targetDestinationId', destination_id) + logger.info(f"Destination {destination['name']} with id {destination['targetDestinationId']} creation complete.") + return result + + + @staticmethod + def create_webhook_destination(destination, user_api_key, account_id, region): + logger.info(f"Destination {destination['name']} creation started.") + payload = NotificationsClient.webhook_destination_payload(account_id, destination) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if 
result['response']['data']['aiNotificationsCreateDestination']['error'] is not None: + logger.error(f"Error : {result['response']['data']['aiNotificationsCreateDestination']['error']}") + else: + destination_id = result['response']['data']['aiNotificationsCreateDestination']['destination']['id'] + destination.setdefault('targetDestinationId', destination_id) + logger.info(f"Destination {destination['name']} with id {destination['targetDestinationId']} creation complete.") + return result + + + @staticmethod + def create_mobile_push_destination(destination, user_api_key, account_id, region): + logger.info(f"Destination {destination['name']} creation started.") + payload = NotificationsClient.mobile_push_destination_payload(account_id, destination) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if result['response']['data']['aiNotificationsCreateDestination']['error'] is not None: + logger.error(f"Error : {result['response']['data']['aiNotificationsCreateDestination']['error']}") + else: + destination_id = result['response']['data']['aiNotificationsCreateDestination']['destination']['id'] + destination.setdefault('targetDestinationId', destination_id) + logger.info(f"Destination {destination['name']} with id {destination['targetDestinationId']} creation complete.") + return result + + + @staticmethod + def create_slack_legacy_destination(destination, user_api_key, account_id, region): + logger.info(f"Destination {destination['name']} creation started.") + payload = NotificationsClient.slack_legacy_destination_payload(account_id, destination) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if result['response']['data']['aiNotificationsCreateDestination']['error'] is not None: + logger.error(f"Error : {result['response']['data']['aiNotificationsCreateDestination']['error']}") + else: + destination_id = 
result['response']['data']['aiNotificationsCreateDestination']['destination']['id'] + destination.setdefault('targetDestinationId', destination_id) + logger.info(f"Destination {destination['name']} with id {destination['targetDestinationId']} creation complete.") + return result + + + @staticmethod + def delete_all_destinations(destinations_by_id, user_api_key, account_id, region): + logger.info(f"Destination deletion for account {account_id} started.") + for destination in destinations_by_id.values(): + NotificationsClient.delete_destination(destination, user_api_key, account_id, region) + logger.info(f"Destination deletion for account {account_id} complete.") + + + @staticmethod + def delete_destination(destination, user_api_key, account_id, region): + logger.info(f"Destination {destination['name']} with id {destination['id']} deletion started.") + payload = NotificationsClient.delete_destination_payload(account_id, destination['id']) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if result['response']['data']['aiNotificationsDeleteDestination']['error'] is not None: + logger.error(f"Error : {result['response']['data']['aiNotificationsDeleteDestination']['error']}") + else: + logger.info(f"Destination {destination['name']} with id {destination['id']} deletion complete.") + return result + + + @staticmethod + def create_email_channel(channel, user_api_key, account_id, region): + logger.info(f"Channel {channel['name']} creation started.") + payload = NotificationsClient.email_channel_payload(account_id, channel) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if result['response']['data']['aiNotificationsCreateChannel']['error'] is not None: + logger.error(f"Error : {result['response']['data']['aiNotificationsCreateChannel']['error']}") + else: + channel_id = 
result['response']['data']['aiNotificationsCreateChannel']['channel']['id'] + channel.setdefault('targetChannelId', channel_id) + logger.info(f"Channel {channel['name']} with id {channel['targetChannelId']} creation complete.") + return result + + + @staticmethod + def create_webhook_channel(channel, user_api_key, account_id, region): + logger.info(f"Channel {channel['name']} creation started.") + payload = NotificationsClient.webhook_channel_payload(account_id, channel) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if result['response']['data']['aiNotificationsCreateChannel']['error'] is not None: + logger.error(f"Error : {result['response']['data']['aiNotificationsCreateChannel']['error']}") + else: + channel_id = result['response']['data']['aiNotificationsCreateChannel']['channel']['id'] + channel.setdefault('targetChannelId', channel_id) + logger.info(f"Channel {channel['name']} with id {channel['targetChannelId']} creation complete.") + return result + + + @staticmethod + def create_mobile_push_channel(channel, user_api_key, account_id, region): + logger.info(f"Channel {channel['name']} creation started.") + payload = NotificationsClient.mobile_push_channel_payload(account_id, channel) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if result['response']['data']['aiNotificationsCreateChannel']['error'] is not None: + logger.error(f"Error : {result['response']['data']['aiNotificationsCreateChannel']['error']}") + else: + channel_id = result['response']['data']['aiNotificationsCreateChannel']['channel']['id'] + channel.setdefault('targetChannelId', channel_id) + logger.info(f"Channel {channel['name']} with id {channel['targetChannelId']} creation complete.") + return result + + + @staticmethod + def create_slack_legacy_channel(channel, user_api_key, account_id, region): + logger.info(f"Channel 
{channel['name']} creation started.") + payload = NotificationsClient.slack_legacy_channel_payload(account_id, channel) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if result['response']['data']['aiNotificationsCreateChannel']['error'] is not None: + logger.error(f"Error : {result['response']['data']['aiNotificationsCreateChannel']['error']}") + else: + channel_id = result['response']['data']['aiNotificationsCreateChannel']['channel']['id'] + channel.setdefault('targetChannelId', channel_id) + logger.info(f"Channel {channel['name']} with id {channel['targetChannelId']} creation complete.") + return result + + + @staticmethod + def delete_all_channels(channels_by_id, user_api_key, account_id, region): + logger.info(f"Channel deletion for account {account_id} started.") + for channel in channels_by_id.values(): + NotificationsClient.delete_channel(channel, user_api_key, account_id, region) + logger.info(f"Channel deletion for account {account_id} complete.") + + + @staticmethod + def delete_channel(channel, user_api_key, account_id, region): + logger.info(f"Channel {channel['name']} with id {channel['id']} deletion started.") + payload = NotificationsClient.delete_channel_payload(account_id, channel['id']) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if result['response']['data']['aiNotificationsDeleteChannel']['error'] is not None: + logger.error(f"Error : {result['response']['data']['aiNotificationsDeleteChannel']['error']}") + else: + logger.info(f"Channel {channel['name']} with id {channel['id']} deletion complete.") + return result + + + @staticmethod + def destinations(account_id, cursor): + query = '''query($accountId: Int!, $cursor: String) { + actor { + account(id: $accountId) { + aiNotifications { + destinations (cursor: $cursor) { + entities { + id + name + active + createdAt + properties 
{ + displayValue + key + label + value + } + status + type + updatedAt + updatedBy + accountId + isUserAuthenticated + lastSent + } + error { + details + description + type + } + nextCursor + totalCount + } + } + } + } + }''' + variables = {'accountId': account_id, 'cursor': cursor} + return {'query': query, 'variables': variables} + + + @staticmethod + def channels(account_id, cursor): + query = '''query($accountId: Int!, $cursor: String) { + actor { + account(id: $accountId) { + aiNotifications { + channels (cursor: $cursor) { + entities { + accountId + active + createdAt + destinationId + id + name + product + properties { + displayValue + key + label + value + } + status + type + updatedBy + updatedAt + } + error { + description + details + type + } + nextCursor + totalCount + } + } + } + } + }''' + variables = {'accountId': account_id, 'cursor': cursor} + return {'query': query, 'variables': variables} + + + @staticmethod + def email_destination_payload(account_id, destination): + mutation = '''mutation ($accountId: Int!, $destinationName: String!, $properties: [AiNotificationsPropertyInput!]!) { + aiNotificationsCreateDestination(accountId: $accountId, destination: { + name: $destinationName, + properties: $properties, + type: EMAIL + }) { + destination { + id + name + } + error { + ... on AiNotificationsResponseError { + description + details + type + } + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'destinationName': destination['name'], + 'properties': destination['properties'] + } + } + + + @staticmethod + def webhook_destination_payload(account_id, destination): + mutation = '''mutation ($accountId: Int!, $destinationName: String!, $properties: [AiNotificationsPropertyInput!]!) { + aiNotificationsCreateDestination(accountId: $accountId, destination: { + name: $destinationName, + properties: $properties, + type: WEBHOOK + }) { + destination { + id + name + } + error { + ... 
on AiNotificationsResponseError { + description + details + type + } + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'destinationName': destination['name'], + 'properties': destination['properties'] + } + } + + + @staticmethod + def mobile_push_destination_payload(account_id, destination): + mutation = '''mutation ($accountId: Int!, $destinationName: String!, $properties: [AiNotificationsPropertyInput!]!) { + aiNotificationsCreateDestination(accountId: $accountId, destination: { + name: $destinationName, + properties: $properties, + type: MOBILE_PUSH + }) { + destination { + id + name + } + error { + ... on AiNotificationsResponseError { + description + details + type + } + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'destinationName': destination['name'], + 'properties': destination['properties'] + } + } + + + @staticmethod + def slack_legacy_destination_payload(account_id, destination): + mutation = '''mutation ($accountId: Int!, $destinationName: String!, $properties: [AiNotificationsPropertyInput!]!) { + aiNotificationsCreateDestination(accountId: $accountId, destination: { + name: $destinationName, + properties: $properties, + type: SLACK_LEGACY + }) { + destination { + id + name + } + error { + ... on AiNotificationsResponseError { + description + details + type + } + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'destinationName': destination['name'], + 'properties': destination['properties'] + } + } + + + @staticmethod + def delete_destination_payload(account_id, destination_id): + mutation = '''mutation ($accountId: Int!, $destinationId: ID!) 
{ + aiNotificationsDeleteDestination(accountId: $accountId, destinationId: $destinationId) { + ids + error { + details + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'destinationId': destination_id + } + } + + + @staticmethod + def email_channel_payload(account_id, channel): + mutation = '''mutation ($accountId: Int!, $channelName: String!, $destinationId: ID!, $product: AiNotificationsProduct!, $properties: [AiNotificationsPropertyInput!]!) { + aiNotificationsCreateChannel(accountId: $accountId, channel: { + destinationId: $destinationId, + name: $channelName, + product: $product, + properties: $properties, + type: EMAIL + }) { + channel { + id + name + } + error { + ... on AiNotificationsResponseError { + description + details + type + } + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'channelName': channel['name'], + 'destinationId': channel['destinationId'], + 'product': channel['product'], + 'properties': channel['properties'] + } + } + + + @staticmethod + def webhook_channel_payload(account_id, channel): + mutation = '''mutation ($accountId: Int!, $channelName: String!, $destinationId: ID!, $product: AiNotificationsProduct!, $properties: [AiNotificationsPropertyInput!]!) { + aiNotificationsCreateChannel(accountId: $accountId, channel: { + destinationId: $destinationId, + name: $channelName, + product: $product, + properties: $properties, + type: WEBHOOK + }) { + channel { + id + name + } + error { + ... 
on AiNotificationsResponseError { + description + details + type + } + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'channelName': channel['name'], + 'destinationId': channel['destinationId'], + 'product': channel['product'], + 'properties': channel['properties'] + } + } + + + @staticmethod + def mobile_push_channel_payload(account_id, channel): + mutation = '''mutation ($accountId: Int!, $channelName: String!, $destinationId: ID!, $product: AiNotificationsProduct!, $properties: [AiNotificationsPropertyInput!]!) { + aiNotificationsCreateChannel(accountId: $accountId, channel: { + destinationId: $destinationId, + name: $channelName, + product: $product, + properties: $properties, + type: MOBILE_PUSH + }) { + channel { + id + name + } + error { + ... on AiNotificationsResponseError { + description + details + type + } + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'channelName': channel['name'], + 'destinationId': channel['destinationId'], + 'product': channel['product'], + 'properties': channel['properties'] + } + } + + + @staticmethod + def slack_legacy_channel_payload(account_id, channel): + mutation = '''mutation ($accountId: Int!, $channelName: String!, $destinationId: ID!, $product: AiNotificationsProduct!, $properties: [AiNotificationsPropertyInput!]!) { + aiNotificationsCreateChannel(accountId: $accountId, channel: { + destinationId: $destinationId, + name: $channelName, + product: $product, + properties: $properties, + type: SLACK_LEGACY + }) { + channel { + id + name + } + error { + ... 
on AiNotificationsResponseError { + description + details + type + } + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'channelName': channel['name'], + 'destinationId': channel['destinationId'], + 'product': channel['product'], + 'properties': channel['properties'] + } + } + + + @staticmethod + def delete_channel_payload(account_id, channel_id): + mutation = '''mutation ($accountId: Int!, $channelId: ID!) { + aiNotificationsDeleteChannel(accountId: $accountId, channelId: $channelId) { + ids + error { + details + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'channelId': channel_id + } + } diff --git a/library/clients/workflowsclient.py b/library/clients/workflowsclient.py new file mode 100644 index 0000000..d57b862 --- /dev/null +++ b/library/clients/workflowsclient.py @@ -0,0 +1,222 @@ +import json +import os +import library.utils as utils +import library.migrationlogger as nrlogger +import library.clients.gql as nerdgraph + +logger = nrlogger.get_logger(os.path.basename(__file__)) + + +class WorkflowsClient: + + def __init__(self): + pass + + + @staticmethod + def query(func, user_api_key, account_id, region, cursor): + payload = func(account_id, cursor) + logger.debug(json.dumps(payload)) + return nerdgraph.GraphQl.post(user_api_key, payload, region) + + + @staticmethod + def workflows(account_id, cursor): + query = '''query($accountId: Int!, $cursor: String) { + actor { + account(id: $accountId) { + aiWorkflows { + workflows (cursor: $cursor) { + entities { + accountId + createdAt + destinationConfigurations { + channelId + name + notificationTriggers + type + } + destinationsEnabled + enrichments { + accountId + configurations { + ... 
on AiWorkflowsNrqlConfiguration { + query + } + } + createdAt + id + name + type + updatedAt + } + enrichmentsEnabled + id + issuesFilter { + accountId + id + name + predicates { + attribute + operator + values + } + type + } + lastRun + mutingRulesHandling + name + updatedAt + workflowEnabled + } + nextCursor + totalCount + } + } + } + } + }''' + variables = {'accountId': account_id, 'cursor': cursor} + return {'query': query, 'variables': variables} + + + @staticmethod + def create_workflow(workflow, user_api_key, account_id, region): + logger.info(f"Workflow {workflow['name']} creation started.") + workflowData = {} + workflowKeysToCopy = set([ + 'destinationsEnabled', + 'enrichmentsEnabled', + 'mutingRulesHandling', + 'name', + 'workflowEnabled' + ]) + # creating a shallow copy using for loop + for key, value in workflow.items(): + if key in workflowKeysToCopy: + workflowData[key] = value + if len(workflow['enrichments']) < 1: + workflowData['enrichments'] = None + # Update channel id values in destinationConfigurations + logger.info(f"Update channel id values in destinationConfigurations") + if 'destinationConfigurations' in workflow: + workflowData['destinationConfigurations'] = { + 'channelId': workflow['destinationConfigurations'][0]['targetChannelId'], # It's unclear why destination configurations is of type list, when there is only one channel permitted per workflow. From the docs[https://docs.newrelic.com/docs/apis/nerdgraph/examples/nerdgraph-api-workflows/#create-workflow]: Callout note: A channel ID is unique and so can't be used in multiple workflows or used multiple times in the same workflow. + 'notificationTriggers': workflow['destinationConfigurations'][0]['notificationTriggers'] + } + # Update policy id values in issuesFilter + logger.info(f"Update policy id values in issuesFilter") + predicates = [] + for predicate in workflow['issuesFilter']['predicates']: + target_predicate = { + 'attribute': predicate['attribute'], # String! 
+ 'operator': predicate['operator'], # iWorkflowsOperator! + 'values': predicate['targetValues'] # [String!]! + } + predicates.append(target_predicate) + if 'issuesFilter' in workflow: + workflowData['issuesFilter'] = { + 'name': workflow['issuesFilter']['name'], # String: this is a guid, which is unexpected + 'predicates': predicates, # [AiWorkflowsPredicateInput!]!: + 'type': workflow['issuesFilter']['type'] # AiWorkflowsFilterType!: + } + payload = WorkflowsClient.workflow_payload(account_id, workflowData) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if len(result['response']['data']['aiWorkflowsCreateWorkflow']['errors']) > 0: + logger.error(f"Error : {result['response']['data']['aiWorkflowsCreateWorkflow']['errors']}") + else: + workflow_id = result['response']['data']['aiWorkflowsCreateWorkflow']['workflow']['id'] + workflow.setdefault('targetWorkflowId', workflow_id) + logger.info(f"Workflow {workflow['name']} with id {workflow['targetWorkflowId']} creation complete.") + return result + + + @staticmethod + def workflow_payload(account_id, workflowData): + mutation = '''mutation ($accountId: Int!, $workflowData: AiWorkflowsCreateWorkflowInput!) 
{ + aiWorkflowsCreateWorkflow( + accountId: $accountId + createWorkflowData: $workflowData) { + workflow { + id + name + destinationConfigurations { + channelId + name + type + notificationTriggers + } + enrichmentsEnabled + destinationsEnabled + issuesFilter { + accountId + id + name + predicates { + attribute + operator + values + } + type + } + lastRun + workflowEnabled + mutingRulesHandling + } + errors { + description + type + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'workflowData': workflowData + } + } + + @staticmethod + def delete_all_workflows(workflows_by_id, user_api_key, account_id, region, delete_channels=True): + logger.info(f"Workflow deletion for account {account_id} started.") + for workflow in workflows_by_id.values(): + WorkflowsClient.delete_workflow(workflow, user_api_key, account_id, region, delete_channels) + logger.info(f"Workflow deletion for account {account_id} complete.") + + + @staticmethod + def delete_workflow(workflow, user_api_key, account_id, region, delete_channels=True): + logger.info(f"Workflow {workflow['name']} with id {workflow['id']} deletion started.") + payload = WorkflowsClient.delete_workflow_payload(account_id, delete_channels, workflow['id']) + logger.debug(json.dumps(payload)) + result = nerdgraph.GraphQl.post(user_api_key, payload, region) + if 'response' in result: + if result['response']['data']['aiWorkflowsDeleteWorkflow']['errors'] is not None: + logger.error(f"Error : {result['response']['data']['aiWorkflowsDeleteWorkflow']['errors']}") + else: + logger.info(f"Workflow {workflow['name']} with id {workflow['id']} deletion complete.") + return result + + + @staticmethod + def delete_workflow_payload(account_id, delete_channels, id): + mutation = '''mutation ($accountId: Int!, $deleteChannels: Boolean!, $id: ID!) 
{ + aiWorkflowsDeleteWorkflow(accountId: $accountId, deleteChannels: $deleteChannels, id: $id) { + id + errors { + description + type + } + } + }''' + return { + 'query': mutation, + 'variables': { + 'accountId': int(account_id), + 'deleteChannels': delete_channels, + 'id': id + } + } diff --git a/library/localstore.py b/library/localstore.py index cfdcc58..6fe1e15 100644 --- a/library/localstore.py +++ b/library/localstore.py @@ -23,7 +23,12 @@ ALERT_VIOLATIONS_CSV = "alert_violations.csv" ALERT_CHANNELS_FILE = "alert_channels.json" MONITOR_LABELS_CSV = "monitor_labels.csv" +NOTIFICATION_CHANNELS_FILE = 'notification_channels.json' +NOTIFICATION_DESTINATIONS_FILE = 'notification_destinations.json' +NOTIFICATIONS_DIR = "notifications" SYNTHETIC_ALERTS_FILE = "synthetics_alerts.json" +WORKFLOWS_DIR = "workflows" +WORKFLOWS_FILE = "workflows.json" logger = migrationlogger.get_logger(os.path.basename(__file__)) @@ -128,6 +133,47 @@ def save_feature_settings_csv(fs_data: list): csv_writer.writerows(host_data + [""] for host_data in fs_data) +def save_config_csv(config_name: str, fs_data): + output_dir = Path("output") + fs_file_name = config_name + '.csv' + fs_data_file = output_dir / fs_file_name + create_file(fs_data_file) + with open(str(fs_data_file), 'w', newline='') as csvfile: + csv_writer = csv.writer(csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_ALL) + # Counter variable used for writing headers to the CSV file + isHeaderSet = False + for data in fs_data: + if type(data) is list: + for element in data: + if isHeaderSet == False: + # Writing headers of CSV file + header = element.keys() + csv_writer.writerow(header) + isHeaderSet = True + # Writing data of CSV file + csv_writer.writerow(element.values()) + elif type(data) is object: + if isHeaderSet == False: + # Writing headers of CSV file + header = data.keys() + csv_writer.writerow(header) + isHeaderSet = True + # Writing data of CSV file + csv_writer.writerow(data.values()) + elif type(fs_data) 
is dict and type(data) is str:
+                if isHeaderSet == False:
+                    # Writing headers of CSV file
+                    header = fs_data.get(data).keys()
+                    csv_writer.writerow(header)
+                    isHeaderSet = True
+                # Writing data of CSV file
+                csv_writer.writerow(fs_data.get(data).values())
+            else:
+                logger.error("Unexpected data type: %s", type(data))
+    # Close file
+    csvfile.close()
+
+
 def load_names(from_file):
     names = []
     with open(from_file) as input_names:
@@ -322,3 +368,24 @@ def save_synth_conditions(account_id, synth_conditions):
     base_dir = Path("db")
     alert_policies_dir = base_dir / account_id / ALERT_POLICIES_DIR
     save_json(alert_policies_dir, SYNTHETIC_ALERTS_FILE, synth_conditions)
+
+
+# db//notifications/notification_destinations.json
+def save_notification_destinations(account_id, destinations):
+    base_dir = Path("db")
+    notifications_dir = base_dir / account_id / NOTIFICATIONS_DIR
+    save_json(notifications_dir, NOTIFICATION_DESTINATIONS_FILE, destinations)
+
+
+# db//notifications/notification_channels.json
+def save_notification_channels(account_id, channels):
+    base_dir = Path("db")
+    notifications_dir = base_dir / account_id / NOTIFICATIONS_DIR
+    save_json(notifications_dir, NOTIFICATION_CHANNELS_FILE, channels)
+
+
+# db//workflows/workflows.json
+def save_workflows(account_id, workflows):
+    base_dir = Path("db")
+    workflows_dir = base_dir / account_id / WORKFLOWS_DIR
+    save_json(workflows_dir, WORKFLOWS_FILE, workflows)
diff --git a/migrate_account.py b/migrate_account.py
index d15b78f..e45fce8 100644
--- a/migrate_account.py
+++ b/migrate_account.py
@@ -9,6 +9,8 @@ import fetchworkflows as fetchworkflows
 import library.clients.alertsclient as ac
 import library.clients.entityclient as ec
+import library.clients.notificationsclient as nc
+import library.clients.workflowsclient as wc
 import library.migrationlogger as m_logger
 import library.securecredentials as sec_credentials
 import library.utils as utils
@@ -16,8 +18,10 @@ import migrate_dashboards as md
 import migrateconditions as 
mc import migratemonitors as mm +import migrate_notifications as mn import migratepolicies as mp import migratetags as mt +import migrate_workflows as mw import store_policies as store_policies SRC_ACCT = '1234567' @@ -61,6 +65,15 @@ def cleanup(): # reset_app() logger.info('deleting all target dashboards') ec.delete_all_dashboards(TGT_API_KEY, TGT_ACCT, TGT_REGION) + logger.info('deleting all target workflows') + workflows_by_target_id = fetchworkflows.fetch_workflows(TGT_API_KEY, TGT_ACCT, TGT_REGION, accounts_file=None) + wc.WorkflowsClient.delete_all_workflows(workflows_by_target_id, TGT_API_KEY, TGT_ACCT, TGT_REGION, delete_channels=False) + logger.info('deleting all target channels') + channels_by_target_id = fetchnotifications.fetch_channels(TGT_API_KEY, TGT_ACCT, TGT_REGION, accounts_file=None) + nc.NotificationsClient.delete_all_channels(channels_by_target_id, TGT_API_KEY, TGT_ACCT, TGT_REGION) + logger.info('deleting all target destinations') + destinations_by_target_id = fetchnotifications.fetch_destinations(TGT_API_KEY, TGT_ACCT, TGT_REGION, accounts_file=None) + nc.NotificationsClient.delete_all_destinations(destinations_by_target_id, TGT_API_KEY, TGT_ACCT, TGT_REGION) def fetch(): @@ -70,6 +83,7 @@ def fetch(): logger.info(f'Timestamp for fetched monitors: {src_mon_time_stamp}') logger.info('Fetching alert channels for policies') store_policies.store_alert_policies(SRC_ACCT, SRC_API_KEY, SRC_REGION) + # Fetch legacy channels is redundant, as migrate_policies will retrieve the channels, but it does create the alert_channels.json for review fetchchannels.fetch_alert_channels(SRC_API_KEY, SRC_ACCT, SRC_REGION) fetchentities.fetch_entities(SRC_ACCT, SRC_API_KEY, [ec.DASHBOARD], '{}_dashboards.csv'.format(SRC_ACCT), tag_name=None, tag_value=None, src_region=SRC_REGION, assessment=None) fetchentities.fetch_entities(SRC_ACCT, SRC_API_KEY, [ec.APM_APP], '{}_apm.csv'.format(SRC_ACCT), tag_name=None, tag_value=None, src_region=SRC_REGION, assessment=None) @@ 
-80,16 +94,16 @@ def migrate_step1(): mm.migrate_monitors('output/' + SRC_MON_LIST_FILE, SRC_ACCT, SRC_REGION, SRC_API_KEY, src_mon_time_stamp, TGT_ACCT, TGT_REGION, TGT_API_KEY, MINION_MAPPING_FILE) # Migrate Synthetic monitor entity tags mt.migrate_tags('output/' + SRC_MON_LIST_FILE, SRC_ACCT, SRC_REGION, SRC_API_KEY, TGT_ACCT, TGT_REGION, TGT_API_KEY, [ec.SYNTH_MONITOR]) - policy_names = utils.load_alert_policy_names( - POLICY_NAME_FILE, - ENTITY_NAME_FILE, - SRC_ACCT, - SRC_REGION, - SRC_API_KEY, - USE_LOCAL - ) - mp.migrate_alert_policies(policy_names, SRC_ACCT, SRC_API_KEY, SRC_REGION, TGT_ACCT, TGT_API_KEY, TGT_REGION) - mc.migrate_conditions(policy_names, SRC_ACCT, SRC_REGION, SRC_API_KEY, TGT_ACCT, TGT_REGION, TGT_API_KEY, COND_TYPES, MATCH_SOURCE_CONDITION_STATE) + # Migrate alert policies + policies_by_source_id = mp.migrate(POLICY_NAME_FILE, ENTITY_NAME_FILE, SRC_ACCT, SRC_REGION, TGT_ACCT, TGT_REGION, SRC_API_KEY, TGT_API_KEY, USE_LOCAL) + # Migrate alert conditions + mc.migrate(POLICY_NAME_FILE, ENTITY_NAME_FILE, SRC_ACCT, SRC_REGION, TGT_ACCT, TGT_REGION, SRC_API_KEY, TGT_API_KEY, COND_TYPES, USE_LOCAL, MATCH_SOURCE_CONDITION_STATE) + # Migrate notification destinations + destinations_by_source_id = mn.migrate_destinations(SRC_ACCT, SRC_API_KEY, SRC_REGION, TGT_ACCT, TGT_API_KEY, TGT_REGION) + # Migrate notification channels + channels_by_source_id = mn.migrate_channels(SRC_ACCT, SRC_API_KEY, SRC_REGION, TGT_ACCT, TGT_API_KEY, TGT_REGION, destinations_by_source_id) + # Migrate workflows + workflows_by_source_id = mw.migrate_workflows(SRC_ACCT, SRC_API_KEY, SRC_REGION, TGT_ACCT, TGT_API_KEY, TGT_REGION, channels_by_source_id, policies_by_source_id) def migrate_step2(): @@ -107,3 +121,5 @@ def migrate_step2(): migrate_step1() # Redirect apps, then # migrate_step2() + logger.info('Completed migration') + diff --git a/migrate_notifications.py b/migrate_notifications.py new file mode 100644 index 0000000..0321df2 --- /dev/null +++ 
b/migrate_notifications.py @@ -0,0 +1,165 @@ +import os +import argparse +import fetchnotifications as fetchnotifications +import library.clients.notificationsclient as notificationsclient +import library.localstore as store +import library.migrationlogger as m_logger +import library.utils as utils + + +log = m_logger.get_logger(os.path.basename(__file__)) +nc = notificationsclient.NotificationsClient() + + +def print_args(args, src_api_key, src_region, tgt_api_key, tgt_region): + log.info("Using sourceAccount : " + str(args.sourceAccount[0])) + log.info("Using sourceApiKey : " + len(src_api_key[:-4])*"*"+src_api_key[-4:]) + log.info("sourceRegion : " + src_region) + log.info("Using targetAccount : " + str(args.targetAccount[0])) + log.info("Using targetApiKey : " + len(tgt_api_key[:-4]) * "*" + tgt_api_key[-4:]) + log.info("targetRegion : " + tgt_region) + + +def configure_parser(): + parser = argparse.ArgumentParser(description='Migrate Notifications') + parser.add_argument('--sourceAccount', nargs=1, type=int, required=True, help='Source accountId') + parser.add_argument('--sourceApiKey', nargs=1, type=str, required=True, help='Source account API Key or \ + set environment variable ENV_SOURCE_API_KEY') + parser.add_argument('--sourceRegion', type=str, nargs=1, required=False, help='sourceRegion us(default) or eu') + parser.add_argument('--targetAccount', nargs=1, type=int, required=True, help='Target accountId') + parser.add_argument('--targetApiKey', nargs=1, type=str, required=True, help='Target API Key, \ + or set environment variable ENV_TARGET_API_KEY') + parser.add_argument('--targetRegion', type=str, nargs=1, required=False, help='targetRegion us(default) or eu') + parser.add_argument('--destinations', dest='destinations', required=False, action='store_true', help='Migrate destinations') + parser.add_argument('--channels', dest='channels', required=False, action='store_true', help='Migrate channels') + return parser + + +def 
create_email_destination(destination, tgt_acct, tgt_api_key, tgt_region): + log.info(f"Creating destination: {destination['name']} of type {destination['type']}") + response = nc.create_email_destination(destination, tgt_api_key, tgt_acct, tgt_region) + log.info(f"Created destination: {destination['name']} of type {destination['type']}") + + +def create_webhook_destination(destination, tgt_acct, tgt_api_key, tgt_region): + log.info(f"Creating destination: {destination['name']} of type {destination['type']}") + response = nc.create_webhook_destination(destination, tgt_api_key, tgt_acct, tgt_region) + log.info(f"Created destination: {destination['name']} of type {destination['type']}") + + +def create_mobile_push_destination(destination, tgt_acct, tgt_api_key, tgt_region): + log.info(f"Creating destination: {destination['name']} of type {destination['type']}") + response = nc.create_mobile_push_destination(destination, tgt_api_key, tgt_acct, tgt_region) + log.info(f"Created destination: {destination['name']} of type {destination['type']}") + + +def create_slack_legacy_destination(destination, tgt_acct, tgt_api_key, tgt_region): + log.info(f"Creating destination: {destination['name']} of type {destination['type']}") + response = nc.create_slack_legacy_destination(destination, tgt_api_key, tgt_acct, tgt_region) + log.info(f"Created destination: {destination['name']} of type {destination['type']}") + + +def create_destination(destination, tgt_acct, tgt_api_key, tgt_region): + log.info(f"Creating destination: {destination['name']}") + if destination['type'] == notificationsclient.DESTINATION_TYPE_EMAIL: + create_email_destination(destination, tgt_acct, tgt_api_key, tgt_region) + elif destination['type'] == notificationsclient.DESTINATION_TYPE_MOBILE_PUSH: + create_mobile_push_destination(destination, tgt_acct, tgt_api_key, tgt_region) + elif destination['type'] == notificationsclient.DESTINATION_TYPE_SLACK_LEGACY: + create_slack_legacy_destination(destination, tgt_acct, 
tgt_api_key, tgt_region) + elif destination['type'] == notificationsclient.DESTINATION_TYPE_WEBHOOK: + create_webhook_destination(destination, tgt_acct, tgt_api_key, tgt_region) + else: + log.warn(f"Unsupported destination type: {destination['type']}, for destination: {destination['name']}") + + +def create_email_channel(channel, tgt_acct, tgt_api_key, tgt_region): + log.info(f"Creating channel: {channel['name']} of type {channel['type']}") + nc.create_email_channel(channel, tgt_api_key, tgt_acct, tgt_region) + log.info(f"Created channel: {channel['name']} of type {channel['type']}") + + +def create_webhook_channel(channel, tgt_acct, tgt_api_key, tgt_region): + log.info(f"Creating channel: {channel['name']} of type {channel['type']}") + nc.create_webhook_channel(channel, tgt_api_key, tgt_acct, tgt_region) + log.info(f"Created channel: {channel['name']} of type {channel['type']}") + + +def create_mobile_push_channel(channel, tgt_acct, tgt_api_key, tgt_region): + log.info(f"Creating channel: {channel['name']} of type {channel['type']}") + nc.create_mobile_push_channel(channel, tgt_api_key, tgt_acct, tgt_region) + log.info(f"Created channel: {channel['name']} of type {channel['type']}") + + +def create_slack_legacy_channel(channel, tgt_acct, tgt_api_key, tgt_region): + log.info(f"Creating channel: {channel['name']} of type {channel['type']}") + nc.create_slack_legacy_channel(channel, tgt_api_key, tgt_acct, tgt_region) + log.info(f"Created channel: {channel['name']} of type {channel['type']}") + + +def create_channel(channel, tgt_acct, tgt_api_key, tgt_region): + log.info(f"Creating channel: {channel['name']}") + if channel['type'] == notificationsclient.CHANNEL_TYPE_EMAIL: + create_email_channel(channel, tgt_acct, tgt_api_key, tgt_region) + elif channel['type'] == notificationsclient.CHANNEL_TYPE_MOBILE_PUSH: + create_mobile_push_channel(channel, tgt_acct, tgt_api_key, tgt_region) + elif channel['type'] == notificationsclient.CHANNEL_TYPE_SLACK_LEGACY: + 
create_slack_legacy_channel(channel, tgt_acct, tgt_api_key, tgt_region) + elif channel['type'] == notificationsclient.CHANNEL_TYPE_WEBHOOK: + create_webhook_channel(channel, tgt_acct, tgt_api_key, tgt_region) + else: + log.warn(f"Unsupported channel type: {channel['type']}, for channel: {channel['name']}") + + +def migrate_destinations(src_acct, src_api_key, src_region, tgt_acct, tgt_api_key, tgt_region): + log.info('Destinations migration started.') + destinations_by_source_id = fetchnotifications.fetch_destinations(src_api_key, src_acct, src_region) + for destination in destinations_by_source_id.values(): + log.info(f"Destination name: {destination['name']}") + create_destination(destination, tgt_acct, tgt_api_key, tgt_region) + log.info('Destinations migration complete.') + return destinations_by_source_id + + +def migrate_channels(src_acct, src_api_key, src_region, tgt_acct, tgt_api_key, tgt_region, destinations_by_source_id): + log.info('Channels migration started.') + channels_by_source_id = fetchnotifications.fetch_channels(src_api_key, src_acct, src_region) + for channel in channels_by_source_id.values(): + log.info(f"Channel name: {channel['name']}") + log.info(f"Mutating destination id for target account: {tgt_acct}") + source_destination_id = channel['destinationId'] + if source_destination_id in destinations_by_source_id: + if 'targetDestinationId' in destinations_by_source_id.get(source_destination_id): + # Mutate channel destinationId, replacing destinationId with targetDestinationId + channel['destinationId'] = destinations_by_source_id.get(source_destination_id)['targetDestinationId'] + log.info(f"Substituting destination id: {source_destination_id} with id: {(channel['destinationId'])}") + create_channel(channel, tgt_acct, tgt_api_key, tgt_region) + else: + log.error(f"Unable to create channel name: {channel['name']}, with source channel id: {channel['id']} and type: {channel['type']}. 
Target destination id unavailable for source destination: {source_destination_id}")
+    log.info('Channels migration complete.')
+    return channels_by_source_id
+
+
+def main():
+    parser = configure_parser()
+    args = parser.parse_args()
+    src_api_key = utils.ensure_source_api_key(args)
+    if not src_api_key:
+        utils.error_and_exit('source_api_key', 'ENV_SOURCE_API_KEY')
+    tgt_api_key = utils.ensure_target_api_key(args)
+    if not tgt_api_key:
+        utils.error_and_exit('target_api_key', 'ENV_TARGET_API_KEY')
+    src_region = utils.ensure_source_region(args)
+    tgt_region = utils.ensure_target_region(args)
+    print_args(args, src_api_key, src_region, tgt_api_key, tgt_region)
+    if args.destinations:
+        migrate_destinations(args.sourceAccount[0], src_api_key, src_region, args.targetAccount[0], tgt_api_key, tgt_region)
+    elif args.channels:
+        # Channels require the source->target destination id mapping, which only
+        # exists after destinations are migrated; migrate destinations first,
+        # matching the flow in migrate_account.migrate_step1.
+        destinations_by_source_id = migrate_destinations(args.sourceAccount[0], src_api_key, src_region, args.targetAccount[0], tgt_api_key, tgt_region)
+        migrate_channels(args.sourceAccount[0], src_api_key, src_region, args.targetAccount[0], tgt_api_key, tgt_region, destinations_by_source_id)
+    else:
+        log.info("pass [--destinations | --channels] to fetch configuration")
+
+
+if __name__ == '__main__':
+    main()
diff --git a/migrate_workflows.py b/migrate_workflows.py
new file mode 100644
index 0000000..121217c
--- /dev/null
+++ b/migrate_workflows.py
@@ -0,0 +1,117 @@
+import os
+import argparse
+import fetchworkflows as fetchworkflows
+import library.clients.workflowsclient as workflowsclient
+import library.localstore as store
+import library.migrationlogger as m_logger
+import library.utils as utils
+
+
+log = m_logger.get_logger(os.path.basename(__file__))
+nc = workflowsclient.WorkflowsClient()
+
+
+def print_args(args, src_api_key, src_region, tgt_api_key, tgt_region):
+    log.info("Using sourceAccount : " + str(args.sourceAccount[0]))
+    log.info("Using sourceApiKey : " + len(src_api_key[:-4])*"*"+src_api_key[-4:])
+    log.info("sourceRegion : " + src_region)
+    log.info("Using targetAccount : " + str(args.targetAccount[0]))
+    log.info("Using targetApiKey : " + 
len(tgt_api_key[:-4]) * "*" + tgt_api_key[-4:])
+    log.info("targetRegion : " + tgt_region)
+
+
+def configure_parser():
+    parser = argparse.ArgumentParser(description='Migrate Workflows')
+    parser.add_argument('--sourceAccount', nargs=1, type=int, required=True, help='Source accountId')
+    parser.add_argument('--sourceApiKey', nargs=1, type=str, required=True, help='Source account API Key or \
+        set environment variable ENV_SOURCE_API_KEY')
+    parser.add_argument('--sourceRegion', type=str, nargs=1, required=False, help='sourceRegion us(default) or eu')
+    parser.add_argument('--targetAccount', nargs=1, type=int, required=True, help='Target accountId')
+    parser.add_argument('--targetApiKey', nargs=1, type=str, required=True, help='Target API Key, \
+        or set environment variable ENV_TARGET_API_KEY')
+    parser.add_argument('--targetRegion', type=str, nargs=1, required=False, help='targetRegion us(default) or eu')
+    return parser
+
+
+def create_workflow(workflow, tgt_acct, tgt_api_key, tgt_region):
+    log.info(f"Creating workflow: {workflow['name']}")
+    nc.create_workflow(workflow, tgt_api_key, tgt_acct, tgt_region)
+    log.info(f"Created workflow: {workflow['name']}")
+
+
+def migrate_workflows(src_acct, src_api_key, src_region, tgt_acct, tgt_api_key, tgt_region, channels_by_source_id, policies_by_source_id):
+    log.info('Workflows migration started.')
+    workflows_by_source_id = fetchworkflows.fetch_workflows(src_api_key, src_acct, src_region)
+    for workflow in workflows_by_source_id.values():
+        # Reset per workflow: a failure in one workflow must not block creation
+        # of every subsequent workflow.
+        hasError = False
+        log.info(f"Workflow name: {workflow['name']}")
+        # Enrich destinationConfigurations with target channel ids
+        log.info(f"Enriching destination configurations for target account: {tgt_acct}")
+        if 'destinationConfigurations' in workflow:
+            for destination_configuration in workflow['destinationConfigurations']:
+                if 'channelId' in destination_configuration:
+                    source_channel_id = destination_configuration['channelId']
+                    if source_channel_id in channels_by_source_id: 
+ channel = channels_by_source_id.get(source_channel_id) + if 'targetChannelId' in channel: + destination_configuration['targetChannelId'] = channel['targetChannelId'] + log.info(f"Target channel id: {destination_configuration['targetChannelId']} found for source channel id: {source_channel_id}") + else: + # hasError = True # TODO revert to error once slack and webhook destinations and channels are complete + log.warning(f"Unable to create workflow name: {workflow['name']}. Target channel id unavailable for source channel id: {source_channel_id} with type: {channel['type']}") + else: + hasError = True + log.error(f"Unable to create workflow name: {workflow['name']}. Source channel id: {source_channel_id} unavailable") + else: + hasError = True + log.info(f"Workflow name: {workflow['name']} with id: {workflow['id']} has no destinationConfigurations: {workflow}") + # Enrich issuesFilter with target account id and source policy ids + log.info(f"Enriching issues filter for target account: {tgt_acct}") + if "issuesFilter" in workflow: + workflow['issuesFilter']['targetAccountId'] = int(tgt_acct) + for predicate in workflow['issuesFilter']['predicates']: + targetValues = [] + for source_policy_id in predicate['values']: + if int(source_policy_id) in policies_by_source_id: + policy = policies_by_source_id.get(int(source_policy_id)) + if 'targetPolicyId' in policy: + targetValues.append(str(policy['targetPolicyId'])) + log.info(f"Target policy id: {str(policy['targetPolicyId'])} found for source policy id: {source_policy_id} ") + else: + hasError = True + log.error(f"Unable to create workflow name: {workflow['name']}. Target policy id unavailable for source policy id: {source_policy_id}") + else: + hasError = True + log.error(f"Unable to create workflow name: {workflow['name']}. 
Target policy id unavailable for source policy id: {source_policy_id}")
+                    if len(targetValues) > 0:
+                        predicate['targetValues'] = targetValues
+        else:
+            hasError = True
+            log.info(f"Workflow name: {workflow['name']} with id: {workflow['id']} has no issuesFilter: {workflow}")
+        # Create the workflow
+        if not hasError:
+            create_workflow(workflow, tgt_acct, tgt_api_key, tgt_region)
+        else:
+            log.error(f"Unable to create workflow name: {workflow['name']}, {workflow}")
+    log.info('Workflows migration complete.')
+    return workflows_by_source_id
+
+
+def main():
+    parser = configure_parser()
+    args = parser.parse_args()
+    src_api_key = utils.ensure_source_api_key(args)
+    if not src_api_key:
+        utils.error_and_exit('source_api_key', 'ENV_SOURCE_API_KEY')
+    tgt_api_key = utils.ensure_target_api_key(args)
+    if not tgt_api_key:
+        utils.error_and_exit('target_api_key', 'ENV_TARGET_API_KEY')
+    src_region = utils.ensure_source_region(args)
+    tgt_region = utils.ensure_target_region(args)
+    print_args(args, src_api_key, src_region, tgt_api_key, tgt_region)
+    # Standalone runs have no source->target channel/policy mappings; pass empty
+    # mappings so unmappable workflows are logged and skipped instead of the
+    # previous TypeError from the missing arguments.
+    migrate_workflows(args.sourceAccount[0], src_api_key, src_region, args.targetAccount[0], tgt_api_key, tgt_region, {}, {})
+
+
+if __name__ == '__main__':
+    main()
diff --git a/migratepolicies.py b/migratepolicies.py
index 75bc1fc..aa182f8 100644
--- a/migratepolicies.py
+++ b/migratepolicies.py
@@ -205,6 +205,7 @@ def migrate_alert_policies(policy_names: List[str],
                            src_account: int, src_api_key: str, src_region: str,
                            tgt_account: int, tgt_api_key: str, tgt_region: str):
     logger.info('Alert migration started.')
+    policies_by_source_id = {}
     all_alert_status = {}
     if fetch_channels:
         logger.info('Fetching latest channel info and policy assignment. 
This may take a while.....') @@ -234,10 +235,15 @@ def migrate_alert_policies(policy_names: List[str], result = ac.create_alert_policy(tgt_api_key, src_policy, tgt_region) update_create_status(all_alert_status, policy_name, result) tgt_policy = result['policy'] + src_policy['targetPolicyId'] = tgt_policy['id'] + policies_by_source_id.setdefault(src_policy['id'], src_policy) # update_notification_channels(tgt_api_key, tgt_region, src_policy, tgt_policy, loaded_src_channels, # tgt_channels_by_type_name, all_alert_status) logger.info('Alert migration complete.') - return all_alert_status + return_dict = dict() + return_dict['all_alert_status'] = all_alert_status + return_dict['policies_by_source_id'] = policies_by_source_id + return return_dict def update_create_status(all_alert_status, policy_name, result): @@ -267,7 +273,7 @@ def migrate( use_local ) - status = migrate_alert_policies( + return_dict = migrate_alert_policies( policy_names, source_acct_id, source_api_key, @@ -277,6 +283,9 @@ def migrate( target_region ) + policies_by_source_id = return_dict['policies_by_source_id'] + status = return_dict['all_alert_status'] + status_file = ac.get_alert_status_file_name( policy_file_path, entity_file_path, @@ -286,7 +295,7 @@ def migrate( ) store.save_status_csv(status_file, status, askeys) - return status_file + return policies_by_source_id class MigratePoliciesCommand: