From 56dfbb8218d7b067c67bf6ca62a1ea3f798ed3f9 Mon Sep 17 00:00:00 2001 From: Adi Pandit <39402771+adiosspandit@users.noreply.github.com> Date: Thu, 27 May 2021 14:58:18 -0400 Subject: [PATCH] feat: Support EU Endpoints (#22) Added support for EU endpoints for most scripts. --- README.md | 31 ++-- config.ini.example | 24 ++- deleteallmonitors.py | 67 +++----- deletemonitors.py | 72 ++++----- fetchchannels.py | 29 ++-- fetchentities.py | 24 +-- fetchmonitors.py | 19 ++- library/clients/alertsclient.py | 165 +++++++++----------- library/clients/dbentityclient.py | 108 +++++++++++++ library/clients/endpoints.py | 90 +++++++++++ library/clients/entityclient.py | 170 ++++++++++----------- library/clients/insightsclient.py | 7 +- library/clients/monitorsclient.py | 43 ++++-- library/localstore.py | 2 + library/migrator/app_conditions.py | 16 +- library/migrator/extsvc_conditions.py | 17 ++- library/migrator/infra_conditions.py | 13 +- library/migrator/loc_failure_conditions.py | 14 +- library/migrator/nrql_conditions.py | 10 +- library/migrator/synth_conditions.py | 18 ++- library/securecredentials.py | 18 +-- library/utils.py | 60 +++++++- migrate_apm.py | 60 ++++---- migrate_dashboards.py | 64 ++++---- migrateconditions.py | 129 +++++++++++----- migratemonitors.py | 30 ++-- migratepolicies.py | 110 +++++++++---- nrmig | 3 + updatemonitors.py | 32 ++-- 29 files changed, 935 insertions(+), 510 deletions(-) create mode 100644 library/clients/dbentityclient.py create mode 100644 library/clients/endpoints.py diff --git a/README.md b/README.md index 26f44b5..a239ab6 100644 --- a/README.md +++ b/README.md @@ -120,7 +120,7 @@ APM Configuration #### 1) python3 fetchmonitors.py ``` -usage: fetchmonitors.py [-h] --sourceAccount SOURCEACCOUNT +usage: fetchmonitors.py --sourceAccount SOURCEACCOUNT --region [ us (default) | eu ] --sourceApiKey SOURCEAPIKEY --insightsQueryKey INSIGHTSQUERYKEY --toFile TOFILE @@ -129,6 +129,7 @@ usage: fetchmonitors.py [-h] --sourceAccount SOURCEACCOUNT Parameter | Note ---------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------- sourceAccount | Account to fetch monitors from +region | Optional region: us (default) or eu sourceApiKey | This should be a User API Key for sourceAccount for a user with admin (or add on / custom role equivalent) access to Synthetics insightsQueryKey | must be supplied to fetch secure credentials from Insights for any monitors that ran in the past 7 days. Secure credentials fetching is skipped if this is not passed. toFile | should only be a file name e.g. source-monitors.csv. It will always be created in output/ directory @@ -139,7 +140,7 @@ toFile | should only be a file name e.g. source-monitors.csv. It will a #### 3) python3 fetchchannels.py (optional if you want to use --useLocal option during migratepolicies) -`usage: fetchalerts.py [-h] --sourceAccount SOURCEACCOUNT [--sourceApiKey SOURCEAPIKEY]` +`usage: fetchchannels.py --sourceAccount SOURCEACCOUNT [--sourceApiKey SOURCEAPIKEY] --region [ us (default) | eu ]` Fetches alert channels and builds a dictionary mapping channels to policy_id. 
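Every converted script wires the new flag the same way: an optional `--region` argument on its parser, resolved through `utils.ensure_region(args)` with us as the fallback. The library/utils.py hunk is not reproduced in this excerpt, so the sketch below is an assumption of how that helper behaves, inferred from its call sites and the "Defaulting to us" log lines:

```python
# Hypothetical sketch of the shared --region plumbing; the real ensure_region
# lives in library/utils.py, which this excerpt of the patch omits.
import argparse

REGION_US = 'us'
REGION_EU = 'eu'


def configure_parser():
    parser = argparse.ArgumentParser(description='Fetch monitors from an account')
    parser.add_argument('--sourceAccount', type=str, nargs=1, required=True, help='Source account id')
    parser.add_argument('--region', type=str, nargs=1, required=False, help='region us(default) or eu')
    return parser


def ensure_region(args):
    # Assumed behavior: default to us when --region is absent,
    # and reject anything that is neither us nor eu.
    if not args.region:
        return REGION_US
    region = args.region[0].lower()
    if region not in (REGION_US, REGION_EU):
        raise SystemExit('region must be us or eu')
    return region


if __name__ == '__main__':
    print(ensure_region(configure_parser().parse_args()))
```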
@@ -149,14 +150,16 @@ During migratepolicies the stored alert_channels can be used by passing --useLoc #### 4) python3 migratemonitors.py -`usage: migratemonitors.py [-h] --fromFile FROMFILE --sourceAccount SOURCEACCOUNT --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetApiKey TARGETAPIKEY] --timeStamp TIMESTAMP [--useLocal]` +`usage: migratemonitors.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] --timeStamp TIMESTAMP [--useLocal]` Parameter | Note ------------- | -------------------------------------------------------------------------------------------------------- fromFile | Must contain monitor names one per line. The fetchentities.py script can be used to help generate this list of monitors. sourceAccount | Account to fetch monitors from +sourceRegion | Optional region: us (default) or eu sourceApiKey | This should be a User API Key for sourceAccount for a user with admin (or add on / custom role equivalent) access to Synthetics targetAccount | Account to migrate monitors to +targetRegion | Optional region: us (default) or eu targetApiKey | This should be a User API Key for targetAccount for a user with admin (or add on / custom role equivalent) access to Synthetics timeStamp | must match the timeStamp generated in fetchmonitors, used when useLocal flag is passed useLocal | By default monitors are fetched from sourceAccount. A pre-fetched copy can be used by passing this flag. @@ -179,7 +182,7 @@ A value of 0 CHECK_COUNT for scripted monitors indicates it has not run in the p #### 5) python3 migratepolicies.py -`usage: migratepolicies.py [-h] --fromFile FROMFILE --sourceAccount SOURCEACCOUNT --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetApiKey TARGETAPIKEY] [--useLocal]` +`usage: migratepolicies.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] [--useLocal]` Parameter | Note ---------------- | ------------------------------------------------------------------------------------------------------ fromFile | must contain alert policy names one per line fromFileEntities | must contain APM, Browser, or Mobile application names or IDs or APM KT names or IDs (not GUIDs) personalApiKey | Personal API Key used for GraphQL API Client calls sourceAccount | Account to fetch monitors from +sourceRegion | Optional region: us (default) or eu sourceApiKey | User API Key for sourceAccount for a user with admin (or add on / custom role equivalent) access to Alerts targetAccount | Account to migrate policies to +targetRegion | Optional region: us (default) or eu targetApiKey | User API Key for targetAccount for a user with admin (or add on / custom role equivalent) access to Alerts useLocal | By default, alert channels are fetched from sourceAccount. A pre-fetched copy can be used by passing this flag. @@ -261,7 +266,7 @@ to move will be the union of both. Any target APM, Browser, Mobile apps and Key transactions must be migrated manually. 
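Because sourceRegion and targetRegion are resolved independently, one run can read from a US account and write to an EU account; each REST call picks its base URL through the `Endpoints` factory this patch adds in library/clients/endpoints.py (shown further down). A quick illustration of the resolution:

```python
# Each side of a migration resolves its own endpoint set; Endpoints.of() and
# the URL constants are defined in the new library/clients/endpoints.py.
from library.clients.endpoints import Endpoints

source = Endpoints.of('us')  # source-account calls resolve to *.newrelic.com hosts
target = Endpoints.of('eu')  # target-account calls resolve to *.eu.newrelic.com hosts

print(source.ALERT_POLICIES_URL)  # https://api.newrelic.com/v2/alerts_policies.json
print(target.ALERT_POLICIES_URL)  # https://api.eu.newrelic.com/v2/alerts_policies.json
```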
-`usage: migrateconditions.py [-h] --fromFile FROMFILE --personalApiKey PERSONALAPIKEY --sourceAccount SOURCEACCOUNT --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetApiKey TARGETAPIKEY] [--matchSourceState] [--synthetics --app_conditions --nrql_conditions --infra_conditions]` +`usage: migrateconditions.py [-h] --fromFile FROMFILE --personalApiKey PERSONALAPIKEY --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] [--matchSourceState] [--synthetics --app_conditions --nrql_conditions --infra_conditions]` Parameter | Note -------------- | -------------------------------------------------- fromFile | must contain alert policy names one per line fromFileEntities | must contain APM, Browser, or Mobile application names or IDs or APM KT names or IDs (not GUIDs) personalApiKey | Personal API Key used for GraphQL API Client calls sourceAccount | Account to fetch monitors from +sourceRegion | Optional region: us (default) or eu sourceApiKey | User API Key for sourceAccount for a user with admin (or add on / custom role equivalent) access to Alerts targetAccount | Account to migrate policies to +targetRegion | Optional region: us (default) or eu targetApiKey | User API Key for targetAccount for a user with admin (or add on / custom role equivalent) access to Alerts matchSourceState | Match alert condition enabled/disabled state from the source account in the target account. By default, all copied alert conditions are disabled in the target account. synthetics | Pass this flag to migrate synthetic conditions @@ -307,9 +314,9 @@ if `--app_conditions` is specified. Migrate APM Apdex configuration settings. **This no longer migrates labels.** Please use migratetags.py instead for tag migrations. -usage: migrate_apm.py [-h] --fromFile FROMFILE --sourceAccount SOURCEACCOUNT --personalApiKey PERSONALAPIKEY --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT --targetApiKey TARGETAPIKEY [--settings] +usage: migrate_apm.py --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --personalApiKey PERSONALAPIKEY --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] --targetApiKey TARGETAPIKEY [--settings] ##### Note: Ensure target apps are running or were running recently so that the target ids can be picked @@ -317,9 +324,9 @@ usage: migrate_apm.py [-h] --fromFile FROMFILE --sourceAccount SOURCEACCOUNT #### 8) python3 migrate_dashboards.py -usage: migrate_dashboards.py [-h] --fromFile FROMFILE --sourceAccount SOURCEACCOUNT --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetApiKey TARGETAPIKEY] +usage: migrate_dashboards.py [-h] --fromFile FROMFILE --sourceAccount SOURCEACCOUNT [--sourceRegion SOURCEREGION] --sourceApiKey SOURCEAPIKEY --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] [--targetApiKey TARGETAPIKEY] Migrate dashboards between accounts, including modifying queries to point to the new target account. The fetchentities.py script can help create the file to pass with fromFile. @@ -355,7 +362,7 @@ synthetics | Pass this flag to migrate Synthetic monitor entity tags Potential use is for renaming/disabling migrated monitors in source account. 
-`usage: updatemonitors.py [-h] --fromFile FROMFILE [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT --timeStamp TIMESTAMP [--renamePrefix RENAMEPREFIX] [--disable]` +`usage: updatemonitors.py [-h] --fromFile FROMFILE [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] --timeStamp TIMESTAMP [--renamePrefix RENAMEPREFIX] [--disable]` Parameter | Note ------------- | ------------------------------------------------------------------------- @@ -405,7 +412,7 @@ synthetics | Pass this flag to list Synthetic monitor entities #### 12) python3 deletemonitors.py -`usage: deletemonitors.py [-h] --fromFile FROMFILE [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT --timeStamp TIMESTAMP` +`usage: deletemonitors.py [-h] --fromFile FROMFILE [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION] --timeStamp TIMESTAMP` Will delete monitors listed one per line in --fromFile and stored in db/targetaccount/monitors/timeStamp. The fetchentities.py script can help generate this file. @@ -413,7 +420,7 @@ Will delete monitors listed one per line in --fromFile and stored in db/targetac #### Warning: All monitors in target account will be deleted -`usage: deleteallmonitors.py [-h] [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT` +`usage: deleteallmonitors.py [-h] [--targetApiKey TARGETAPIKEY] --targetAccount TARGETACCOUNT [--targetRegion TARGETREGION]` deleteallmonitors fetches all the monitors, backs them up in db/accountId/monitors/timeStamp-bakup, and then deletes them diff --git a/config.ini.example b/config.ini.example index 13219d0..3f767cb 100644 --- a/config.ini.example +++ b/config.ini.example @@ -1,7 +1,27 @@ [default] -personal_api_key: 123456 + [migrate.policies] -policies_file: /tmp/policies.txt +source_account: 12345 +source_region: us +source_api_key: ASFASFDASFSADF +target_account: 98765 +target_region: us +target_api_key: ASFASFASFASFASF +policies_file: output/us-policies.txt entities_file: /tmp/entities.txt +[migrate.conditions] +source_account_id: 12345 +source_region: us +source_api_key: ASDASFDSAFASFD +target_account_id: 98765 +target_region: us +target_api_key: DFDFSDFSDFDF +policy_file: output/us-policies.txt +all: true +app_conditions: true +synthetics: true +nrql_conditions: true +ext_svc_conditions: true +infra_conditions: true diff --git a/deleteallmonitors.py b/deleteallmonitors.py index 0d36d5d..614299b 100644 --- a/deleteallmonitors.py +++ b/deleteallmonitors.py @@ -6,6 +6,8 @@ import library.localstore as localstore import library.migrationlogger as migrationlogger import library.clients.monitorsclient as monitorsclient +import library.utils as utils +from library.clients.endpoints import Endpoints # This script has been built for TESTING purpose only. Use at your own risk. 
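The config.ini.example sections above mirror the CLI flags, including the us default for the region keys. The nrmig entry point only gains 3 lines in this patch (not shown), so the following configparser read is an assumed sketch of how these sections map onto the flags, not the tool's actual loader:

```python
# Hypothetical reader for the region-aware sections of config.ini.example;
# section and key names come from the example above, the loading logic is assumed.
import configparser

config = configparser.ConfigParser()
config.read('config.ini')

policies = config['migrate.policies']
source_region = policies.get('source_region', 'us')  # same us default as --sourceRegion
target_region = policies.get('target_region', 'us')  # same us default as --targetRegion
print(source_region, '->', target_region)
```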
@@ -15,60 +17,35 @@ # Example use same account id for source and target # deleteallmonitors deletes all the monitors in the targetAccount logger = migrationlogger.get_logger(os.path.basename(__file__)) -headers = {} -monitors_url = 'https://synthetics.newrelic.com/synthetics/api/v3/monitors/' -parser = argparse.ArgumentParser(description='Delete Monitors from an account') -from_api_key = "" -def setup_params(): +def configure_parser(): + parser = argparse.ArgumentParser(description='Delete Monitors from an account') parser.add_argument('--targetApiKey', type=str, nargs=1, required=False, help='API Key for the account') parser.add_argument('--targetAccount', type=str, nargs=1, required=True, help='Target account') + parser.add_argument('--region', type=str, nargs=1, required=False, help='region us(default) or eu') + return parser -def print_args(): +def print_args(args, region): logger.info("Using targetApiKey : " + len(args.targetApiKey[0][:-4])*"*"+args.targetApiKey[0][-4:]) logger.info("Using targetAccount : " + str(args.targetAccount[0])) - - -def setup_headers(api_key): - if api_key: - target_api_key = api_key + if args.region and len(args.region) > 0: + logger.info("region : " + args.region[0]) else: - target_api_key = os.environ.get('ENV_TARGET_API_KEY') - headers['Api-Key'] = target_api_key - if not headers['Api-Key']: - logger.error('Error: Missing API Key. either pass as param ---targetApiKey or \ - environment variable ENV_TARGET_API_KEY.\n \ - e.g. export ENV_TARGET_API_KEY="NRNA7893asdfhkh"') - sys.exit() + logger.info("region not passed : Defaulting to " + region) -def delete(monitors, target_acct): +def delete(monitors, target_acct, tgt_api_key, region): success_status = {} failure_status = {} for monitor in monitors: - delete_monitor(monitor, target_acct, failure_status, success_status) + monitorsclient.delete_monitor(monitor, target_acct, failure_status, success_status, tgt_api_key, region) return {'success': success_status, 'failure': failure_status} -def delete_monitor(monitor, target_acct, failure_status, success_status): - monitor_id = monitor['id'] - monitor_name = monitor['name'] - response = requests.delete(monitors_url + monitor_id, headers=headers) - if response.status_code == 204: - success_status[monitor_name] = {'status': response.status_code, 'responseText': response.text} - logger.info(target_acct + ":" + monitor_name + ":" + str(success_status[monitor_name])) - else: - failure_status[monitor_name] = {'status': response.status_code, 'responseText': response.text} - logger.info(target_acct + ":" + monitor_name + ":" + str(failure_status[monitor_name])) - # trying to stay within 3 requests per second - time.sleep(0.3) - - -def delete_all_monitors(api_key, target_acct): - setup_headers(api_key) - all_monitors_def_json = monitorsclient.fetch_all_monitors(api_key) +def delete_all_monitors(api_key, target_acct, region): + all_monitors_def_json = monitorsclient.fetch_all_monitors(api_key, region) timestamp = time.strftime("%Y-%m%d-%H%M%S") + "-bakup" storage_dir = localstore.create_storage_dirs(target_acct, timestamp) monitor_names_file = localstore.create_output_file("monitors-" + timestamp + ".csv") @@ -79,14 +56,20 @@ def delete_all_monitors(api_key, target_acct): monitor_names_out.write(monitor_name + "\n") localstore.save_monitor_to_file(monitor_name, storage_dir, monitor_json) logger.info("Backed up %d monitors in %s before deleting", len(all_monitors_def_json), storage_dir) - del_response = delete(all_monitors_def_json, target_acct) + del_response = 
delete(all_monitors_def_json, target_acct, api_key, region) logger.debug(del_response) -if __name__ == '__main__': +def main(): start_time = time.time() - setup_params() + parser = configure_parser() args = parser.parse_args() - print_args() - delete_all_monitors(args.targetApiKey[0], args.targetAccount[0]) + region = utils.ensure_region(args) + tgt_api_key = utils.ensure_target_api_key(args) + print_args(args, region) + delete_all_monitors(tgt_api_key, args.targetAccount[0], region) logger.info("Time taken : " + str(time.time() - start_time) + " seconds.") + + +if __name__ == '__main__': + main() diff --git a/deletemonitors.py b/deletemonitors.py index b03af57..ab73503 100644 --- a/deletemonitors.py +++ b/deletemonitors.py @@ -5,6 +5,8 @@ import time import library.localstore as store import library.migrationlogger as m_logger +import library.utils as utils +import library.clients.monitorsclient as monitorsclient # deletemonitors deletes a list of monitors provided in one per line in a csv file @@ -12,68 +14,56 @@ # The fromFile, targetAccountId, targetApiKey and timeStamp must be specified logger = m_logger.get_logger(os.path.basename(__file__)) headers = {} -monitors_url = 'https://synthetics.newrelic.com/synthetics/api/v3/monitors/' -parser = argparse.ArgumentParser(description='Delete Monitors from an account') from_api_key = "" -def setup_params(): +def configure_parser(): + parser = argparse.ArgumentParser(description='Delete Monitors from an account') parser.add_argument('--fromFile', nargs=1, required=True, help='Path to file with monitor names, one per line') parser.add_argument('--targetApiKey', nargs=1, required=False, help='API Key for the account') parser.add_argument('--targetAccount', nargs=1, required=True, help='Target account') parser.add_argument('--timeStamp', nargs=1, required=True, help='Timestamp of the pre-fetched monitors') + parser.add_argument('--region', type=str, nargs=1, required=False, help='region us(default) or eu') + return parser -def print_args(): +def print_args(args, region): logger.info("Using fromFile : " + args.fromFile[0]) logger.info("Using targetApiKey : " + args.targetApiKey[0]) logger.info("Using targetAccount : " + str(args.targetAccount[0])) logger.info("Using timeStamp : " + args.timeStamp[0]) - - -def setup_headers(): - if args.targetApiKey: - target_api_key = args.targetApiKey[0] + if args.region and len(args.region) > 0: + logger.info("region : " + args.region[0]) else: - target_api_key = os.environ.get('ENV_TARGET_API_KEY') - headers['Api-Key'] = target_api_key - if not headers['Api-Key']: - logger.error('Error: Missing API Key. either pass as param ---targetApiKey or \ - environment variable ENV_TARGET_API_KEY.\n \ - e.g. 
export ENV_TARGET_API_KEY="NRNA7893asdfhkh"') - sys.exit() + logger.info("region not passed : Defaulting to " + region) -def delete(monitors): +def delete(monitor_definitions, target_account, tgt_api_key, region): success_status = {} failure_status = {} - for monitor in monitors: - monitor_id = monitor['definition']['id'] - monitor_name = monitor['definition']['name'] - target_account = str(args.targetAccount[0]) - response = requests.delete(monitors_url + monitor_id, headers=headers) - if response.status_code == 204: - success_status[monitor_name] = {'status': response.status_code, 'responseText': response.text} - logger.info(target_account + ":" + monitor_name + ":" + str(success_status[monitor_name])) - else: - failure_status[monitor_name] = {'status': response.status_code, 'responseText': response.text} - logger.info(target_account + ":" + monitor_name + ":" + str(failure_status[monitor_name])) - # trying to stay within 3 requests per second - time.sleep(0.5) + for monitor_definition in monitor_definitions: + monitorsclient.delete_monitor(monitor_definition['definition'], target_account, failure_status, success_status, + tgt_api_key, region) return {'success': success_status, 'failure': failure_status} -def delete_monitors(): - monitor_names = store.load_names(args.fromFile[0]) - monitors = store.load_monitors(args.targetAccount[0], args.timeStamp[0], monitor_names) - del_response = delete(monitors) +def delete_monitors(from_file, tgt_account, time_stamp, tgt_api_key, region): + monitor_names = store.load_names(from_file) + monitor_definitions = store.load_monitors(tgt_account, time_stamp, monitor_names) + del_response = delete(monitor_definitions, tgt_account, tgt_api_key, region) logger.debug(del_response) -start_time = time.time() -setup_params() -args = parser.parse_args() -setup_headers() -print_args() -delete_monitors() -logger.info("Time taken : " + str(time.time() - start_time) + " seconds.") +def main(): + start_time = time.time() + parser = configure_parser() + args = parser.parse_args() + region = utils.ensure_region(args) + tgt_api_key = utils.ensure_target_api_key(args) + print_args(args, region) + delete_monitors(args.fromFile[0], args.targetAccount[0], args.timeStamp[0], tgt_api_key, region) + logger.info("Time taken : " + str(time.time() - start_time) + " seconds.") + + +if __name__ == '__main__': + main() diff --git a/fetchchannels.py b/fetchchannels.py index 826cf7e..0a7da14 100644 --- a/fetchchannels.py +++ b/fetchchannels.py @@ -5,6 +5,7 @@ import library.localstore as store import library.migrationlogger as migrationlogger import library.clients.alertsclient as ac +import library.utils as utils logger = migrationlogger.get_logger(os.path.basename(__file__)) @@ -16,11 +17,16 @@ def setup_params(): parser.add_argument('--sourceAccount', type=str, nargs=1, required=True, help='Source accountId to store the alerts') parser.add_argument('--sourceApiKey', type=str, nargs=1, required=False, help='Source API Key or \ set env var ENV_SOURCE_API_KEY') + parser.add_argument('--region', type=str, nargs=1, required=False, help='region us(default) or eu') -def print_params(): +def print_params(args, source_api_key, region): logger.info("Using sourceAccount : " + str(args.sourceAccount[0])) logger.info("Using sourceApiKey : " + len(source_api_key[:-4]) * "*" + source_api_key[-4:]) + if args.region and len(args.region) > 0: + logger.info("region : " + args.region[0]) + else: + logger.info("region not passed : Defaulting to " + region) def setup_headers(api_key): @@ -48,8 +54,8 @@ 
def validate_keys(): # alert_policy_id: [channel_id] # } # links": { "policy_ids": [] } -def get_channels_by_id_policy(api_key): - src_channels = ac.get_channels(api_key) +def get_channels_by_id_policy(api_key, region): + src_channels = ac.get_channels(api_key, region) channels = {"channels_by_id": {}, "channels_by_policy_id": {}} for channel in src_channels[ac.CHANNELS]: channel_id = str(channel['id']) @@ -63,19 +69,24 @@ def get_channels_by_id_policy(api_key): return channels -def fetch_alert_channels(api_key, account_id): - all_channels = get_channels_by_id_policy(api_key) +def fetch_alert_channels(api_key, account_id, region): + all_channels = get_channels_by_id_policy(api_key, region) store.save_alert_channels(account_id, all_channels) -if __name__ == '__main__': +def main(): start_time = time.time() setup_params() args = parser.parse_args() args_api_key = '' if args.sourceApiKey: args_api_key = args.sourceApiKey[0] + region = utils.ensure_region(args) setup_headers(args_api_key) - print_params() - fetch_alert_channels(args_api_key, args.sourceAccount[0]) - logger.info("Time taken : " + str(time.time() - start_time) + "seconds") \ No newline at end of file + print_params(args, source_api_key, region) + fetch_alert_channels(args_api_key, args.sourceAccount[0], region) + logger.info("Time taken : " + str(time.time() - start_time) + "seconds") + + +if __name__ == '__main__': + main() diff --git a/fetchentities.py b/fetchentities.py index f81dd08..75e1ccc 100644 --- a/fetchentities.py +++ b/fetchentities.py @@ -10,11 +10,13 @@ logger = migrationlogger.get_logger(os.path.basename(__file__)) args = None + def setup_params(): parser = argparse.ArgumentParser(description='Migrate entity tags from one account to another') parser.add_argument('--sourceAccount', nargs=1, required=True, help='Source accountId') parser.add_argument('--sourceApiKey', nargs=1, required=False, help='Source API Key or \ set env var ENV_SOURCE_API_KEY') + parser.add_argument('--region', type=str, nargs=1, required=False, help='region us(default) or eu') parser.add_argument('--toFile', nargs=1, required=True, help='File to populate entity names. 
' 'This will be created in output directory') parser.add_argument('--synthetics', dest='synthetics', required=False, action='store_true', help='Pass --synthetics to list matching Synthetic monitor entities') @@ -31,9 +33,13 @@ def setup_params(): return parser -def print_params(args, source_api_key, entity_types): +def print_params(args, source_api_key, entity_types, region): logger.info("Using sourceAccount : " + str(args.sourceAccount[0])) logger.info("Using sourceApiKey : " + len(source_api_key[:-4])*"*"+source_api_key[-4:]) + if args.region and len(args.region) > 0: + logger.info("region : " + args.region[0]) + else: + logger.info("region not passed : Defaulting to " + region) logger.info("Using entity types : " + str(entity_types)) if args.tagName: logger.info("Using tag name " + str(args.tagName[0]) + " and tag value " + str(args.tagValue[0])) @@ -64,10 +70,10 @@ def parse_entity_types(args): return entity_types -def fetch_entities(src_account_id, src_api_key, entity_types, output_file, tag_name = None, tag_value = None): +def fetch_entities(src_account_id, src_api_key, entity_types, output_file, *, tag_name=None, tag_value=None, region='us'): entity_names = [] for entity_type in entity_types: - entities = ec.gql_get_entities_by_type(src_api_key, entity_type, src_account_id, tag_name, tag_value) + entities = ec.gql_get_entities_by_type(src_api_key, entity_type, src_account_id, tag_name, tag_value, region) for entity in entities['entities']: entity_names.append(entity['name']) entity_names_file = store.create_output_file(output_file) @@ -100,15 +106,15 @@ def main(): if args.tagValue is not None and args.tagName is None: logger.error('tagName is required when tagValue is set') sys.exit() - - - print_params(args, source_api_key, entity_types) + region = utils.ensure_region(args) + print_params(args, source_api_key, entity_types, region) if args.tagName is None: - fetch_entities(args.sourceAccount[0], source_api_key, entity_types, args.toFile[0]) + fetch_entities(args.sourceAccount[0], source_api_key, entity_types, args.toFile[0], region=region) else: - fetch_entities(args.sourceAccount[0], source_api_key, entity_types, args.toFile[0], args.tagName[0], args.tagValue[0]) + fetch_entities(args.sourceAccount[0], source_api_key, entity_types, args.toFile[0], tag_name=args.tagName[0], + tag_value=args.tagValue[0], region=region) if __name__ == '__main__': - main() \ No newline at end of file + main() diff --git a/fetchmonitors.py b/fetchmonitors.py index e1624db..32a4af2 100644 --- a/fetchmonitors.py +++ b/fetchmonitors.py @@ -7,6 +7,7 @@ import library.clients.monitorsclient as mc import library.migrationlogger as m_logger import library.securecredentials as securecredentials +import library.utils as utils # fetch monitors from an account # sourceAccount : account to fetch the monitors from @@ -32,6 +33,7 @@ def setup_params(): set env var ENV_SOURCE_API_KEY') parser.add_argument('--insightsQueryKey', type=str, nargs=1, required=False, help='Insights Query Key to ' 'fetch secure credentials') + parser.add_argument('--region', type=str, nargs=1, required=False, help='region us(default) or eu') parser.add_argument('--toFile', nargs=1, required=True, help='File to populate monitor names. 
' 'This will be created in output directory') @@ -39,6 +41,10 @@ def print_params(): logger.info("Using sourceAccount : " + str(args.sourceAccount[0])) logger.info("Using sourceApiKey : " + len(source_api_key[:-4])*"*"+source_api_key[-4:]) + if args.region and len(args.region) > 0: + logger.info("region : " + args.region[0]) + else: + logger.info("region not passed : Defaulting to " + region) if args.insightsQueryKey and len(args.insightsQueryKey) > 0: logger.info("Using insightsQueryKey to fetch secure credentials : " + len(args.insightsQueryKey[0][:-4]) * "*" + args.insightsQueryKey[0][-4:]) @@ -67,15 +73,15 @@ def validate_keys(): sys.exit() -def populate_secure_credentials(monitor_json, src_account, insights_key): +def populate_secure_credentials(monitor_json, src_account, insights_key, region): if insights_key: sec_credentials_checks = securecredentials.from_insights( - insights_key, src_account, monitor_json['definition']['name']) + insights_key, src_account, monitor_json['definition']['name'], region) monitor_json.update(sec_credentials_checks) -def fetch_monitors(api_key, account_id, output_file, insights_key=''): - all_monitors_def_json = mc.fetch_all_monitors(api_key) +def fetch_monitors(api_key, account_id, output_file, insights_key='', region='us'): + all_monitors_def_json = mc.fetch_all_monitors(api_key, region) monitors_count = len(all_monitors_def_json) if monitors_count <= 0: logger.warn("No monitors found in account " + account_id) @@ -91,7 +97,7 @@ def fetch_monitors(api_key, account_id, output_file, insights_key=''): monitor_name = store.sanitize(monitor_def_json['name']) monitor_names_out.write(monitor_name + "\n") if monitortypes.is_scripted(monitor_json['definition']): - populate_secure_credentials(monitor_json, account_id, insights_key) + populate_secure_credentials(monitor_json, account_id, insights_key, region) mc.populate_script(api_key, monitor_json, monitor_json['definition']['id']) store.save_monitor_to_file(monitor_name, storage_dir, monitor_json) logger.info("Fetched %d monitors in %s", len(all_monitors_def_json), storage_dir) @@ -106,6 +112,7 @@ def fetch_monitors(api_key, account_id, output_file, insights_key=''): args_insights_key = '' if args.insightsQueryKey: args_insights_key = args.insightsQueryKey[0] + region = utils.ensure_region(args) print_params() - fetch_monitors(source_api_key, str(args.sourceAccount[0]), args.toFile[0], args_insights_key) + fetch_monitors(source_api_key, str(args.sourceAccount[0]), args.toFile[0], args_insights_key, region) logger.info("Time taken : " + str(time.time() - start_time) + "seconds") diff --git a/library/clients/alertsclient.py b/library/clients/alertsclient.py index d8bdc80..1ca0aa1 100644 --- a/library/clients/alertsclient.py +++ b/library/clients/alertsclient.py @@ -6,54 +6,27 @@ import library.localstore as store import re import library.clients.entityclient as ec +from library.clients.endpoints import Endpoints logger = migrationlogger.get_logger(os.path.basename(__file__)) -ALERT_POLICIES_URL = 'https://api.newrelic.com/v2/alerts_policies.json' -DEL_ALERTS_URL = 'https://api.newrelic.com/v2/alerts_policies/' - POLICIES = "policies" - -GET_APP_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_conditions.json' -APP_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_conditions/' -CREATE_APP_CONDITION_URL = 'https://api.newrelic.com/v2/alerts_conditions/policies/' CONDITIONS = 'conditions' ENTITIES = 'entities' CONDITION = 'condition' - -GET_SYNTH_CONDITIONS_URL = 
'https://api.newrelic.com/v2/alerts_synthetics_conditions.json' -CREATE_SYNTHETICS_CONDITION_URL = 'https://api.newrelic.com/v2/alerts_synthetics_conditions/policies/' SYNTH_CONDITIONS = 'synthetics_conditions' SYNTH_CONDITION = 'synthetics_condition' - -LOC_FAILURE_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_location_failure_conditions/policies/' LOCATION_FAILURE_CONDITIONS = 'location_failure_conditions' LOCATION_FAILURE_CONDITION = 'location_failure_condition' - -NRQL_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_nrql_conditions.json' NRQL_CONDITIONS = 'nrql_conditions' -CREATE_NRQL_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_nrql_conditions/policies/' NRQL_CONDITION = 'nrql_condition' - -EXTSVC_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_external_service_conditions.json' -CREATE_EXTSVC_CONDITION_URL = 'https://api.newrelic.com/v2/alerts_external_service_conditions/policies/' EXTSVC_CONDITIONS = 'external_service_conditions' EXTSVC_CONDITION = 'external_service_condition' - -INFRA_CONDITIONS_URL = 'https://infra-api.newrelic.com/v2/alerts/conditions' -CREATE_INFRA_CONDITION_URL = 'https://infra-api.newrelic.com/v2/alerts/conditions' INFRA_CONDITIONS = 'data' INFRA_CONDITION = 'data' INFRA_PAGINATION = 'infra' - -ALERTS_CHANNEL_URL = 'https://api.newrelic.com/v2/alerts_channels.json' -DEL_CHANNELS_URL = 'https://api.newrelic.com/v2/alerts_channels/' CHANNELS = "channels" -ALERT_POLICY_CHANNELS_URL = 'https://api.newrelic.com/v2/alerts_policy_channels.json' - -ENTITY_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_entity_conditions' ENTITY_CONDITIONS = 'entity_conditions' - MONITOR_ID = 'monitor_id' SOURCE_POLICY_ID = 'source_policy_id' POLICY_NAME = 'policy_name' @@ -62,14 +35,15 @@ def setup_headers(api_key): return {'Api-Key': api_key, 'Content-Type': 'application/json'} -def get_all_alert_policies(api_key): - return utils.get_paginated_entities(api_key, ALERT_POLICIES_URL, POLICIES) +def get_all_alert_policies(api_key, region=Endpoints.REGION_US): + return utils.get_paginated_entities(api_key, Endpoints.of(region).ALERT_POLICIES_URL, POLICIES) -def get_policy(api_key, name): + +def get_policy(api_key, name, region=Endpoints.REGION_US): filter_params = {'filter[name]': name, 'filter[exact_match]': True} result = {'policyFound': False} - response = requests.get(ALERT_POLICIES_URL, headers=setup_headers(api_key), params=filter_params) + response = requests.get(Endpoints.of(region).ALERT_POLICIES_URL, headers=setup_headers(api_key), params=filter_params) result['status'] = response.status_code if response.status_code in [200, 304]: policies = response.json()['policies'] @@ -83,27 +57,27 @@ def get_policy(api_key, name): return result -def get_channels(api_key): - return utils.get_paginated_entities(api_key, ALERTS_CHANNEL_URL, CHANNELS) +def get_channels(api_key, region=Endpoints.REGION_US): + return utils.get_paginated_entities(api_key, Endpoints.of(region).ALERTS_CHANNEL_URL, CHANNELS) -def get_synthetic_conditions(api_key, alert_id): +def get_synthetic_conditions(api_key, alert_id, region=Endpoints.REGION_US): params = {'policy_id': alert_id} - return utils.get_paginated_entities(api_key, GET_SYNTH_CONDITIONS_URL, SYNTH_CONDITIONS, params) + return utils.get_paginated_entities(api_key, Endpoints.of(region).GET_SYNTH_CONDITIONS_URL, SYNTH_CONDITIONS, params) -def get_location_failure_conditions(api_key, policy_id): - get_url = LOC_FAILURE_CONDITIONS_URL + str(policy_id) + '.json' +def get_location_failure_conditions(api_key, policy_id, 
region=Endpoints.REGION_US): + get_url = Endpoints.of(region).LOC_FAILURE_CONDITIONS_URL + str(policy_id) + '.json' return utils.get_paginated_entities(api_key, get_url, LOCATION_FAILURE_CONDITIONS) -def get_nrql_conditions(api_key, account_id, policy_id): - return ec.get_nrql_conditions(api_key, account_id, policy_id) +def get_nrql_conditions(api_key, account_id, policy_id, region): + return ec.get_nrql_conditions(api_key, account_id, policy_id, region) -def nrql_conditions_by_name(api_key, account_id, policy_id): +def nrql_conditions_by_name(api_key, account_id, policy_id, region): conditions_by_name = {} - result = get_nrql_conditions(api_key, account_id, policy_id) + result = get_nrql_conditions(api_key, account_id, policy_id, region) if result['error']: return { 'error': result['error'], @@ -121,6 +95,7 @@ def nrql_conditions_by_name(api_key, account_id, policy_id): def create_nrql_condition( api_key, + region, account_id, policy_id, nrql_condition, @@ -128,6 +103,7 @@ def create_nrql_condition( ): return ec.create_nrql_condition( api_key, + region, account_id, policy_id, nrql_condition, @@ -135,31 +111,34 @@ def create_nrql_condition( ) -def get_app_conditions(api_key, alert_id): +def get_app_conditions(api_key, alert_id, region=Endpoints.REGION_US): params = {'policy_id': alert_id} - return utils.get_paginated_entities(api_key, GET_APP_CONDITIONS_URL, CONDITIONS, params) + return utils.get_paginated_entities(api_key, Endpoints.of(region).GET_APP_CONDITIONS_URL, CONDITIONS, params) -def get_extsvc_conditions(api_key, policy_id): +def get_extsvc_conditions(api_key, policy_id, region=Endpoints.REGION_US): params = {'policy_id': policy_id} - return utils.get_paginated_entities(api_key, EXTSVC_CONDITIONS_URL, EXTSVC_CONDITIONS, params) + return utils.get_paginated_entities(api_key, Endpoints.of(region).EXTSVC_CONDITIONS_URL, EXTSVC_CONDITIONS, params) + -def get_infra_conditions(api_key, policy_id): +def get_infra_conditions(api_key, policy_id, region=Endpoints.REGION_US): params = {'policy_id': policy_id, 'limit': 50, 'offset': 0} - return utils.get_paginated_entities(api_key, INFRA_CONDITIONS_URL, INFRA_CONDITIONS, params, INFRA_PAGINATION) + return utils.get_paginated_entities(api_key, Endpoints.of(region).INFRA_CONDITIONS_URL, INFRA_CONDITIONS, params, INFRA_PAGINATION) + -def get_entity_conditions(api_key, entity_id, entity_type): - url = '%s/%s.json' % (ENTITY_CONDITIONS_URL, str(entity_id)) +def get_entity_conditions(api_key, entity_id, entity_type, region=Endpoints.REGION_US): + url = '%s/%s.json' % (Endpoints.of(region).ENTITY_CONDITIONS_URL, str(entity_id)) params = {'entity_type': entity_type} return utils.get_paginated_entities(api_key, url, ENTITY_CONDITIONS, params) -def create_channel(api_key, channel): + +def create_channel(api_key, channel, region=Endpoints.REGION_US): target_channel = {'channel': {'name': channel['name'], 'type': channel['type']}} if 'configuration' in channel: target_channel['channel']['configuration'] = channel['configuration'] prepare_channel(target_channel['channel']) result = {} - response = requests.post(ALERTS_CHANNEL_URL, headers=setup_headers(api_key), + response = requests.post(Endpoints.of(region).ALERTS_CHANNEL_URL, headers=setup_headers(api_key), data=json.dumps(target_channel, indent=2)) result['status'] = response.status_code if response.status_code != 201: @@ -191,11 +170,12 @@ def prepare_channel(channel): channel['configuration']['url'] = 'dummy-dummy-dummy' -def put_channel_ids(api_key, policy_id, channel_ids): +def 
put_channel_ids(api_key, policy_id, channel_ids, region=Endpoints.REGION_US): param_channels = ','.join(str(e) for e in channel_ids) params = {'policy_id': policy_id, 'channel_ids': param_channels} result = {} - response = requests.put(ALERT_POLICY_CHANNELS_URL, headers=setup_headers(api_key), params=params) + response = requests.put(Endpoints.of(region).ALERT_POLICY_CHANNELS_URL, headers=setup_headers(api_key), + params=params) result['status'] = response.status_code if response.status_code == 200: result['channel_ids'] = response.json()['policy']['channel_ids'] @@ -207,11 +187,13 @@ def put_channel_ids(api_key, policy_id, channel_ids): return result -def create_alert_policy(api_key, source_policy): +def create_alert_policy(api_key, source_policy, region=Endpoints.REGION_US): policy_name = source_policy['name'] alert_policy = {'policy': {'incident_preference': source_policy['incident_preference'], 'name': policy_name}} result = {'entityCreated': False} - response = requests.post(ALERT_POLICIES_URL, headers=setup_headers(api_key), data=json.dumps(alert_policy)) + logger.info('Using endpoint ' + Endpoints.of(region).ALERT_POLICIES_URL) + response = requests.post(Endpoints.of(region).ALERT_POLICIES_URL, headers=setup_headers(api_key), + data=json.dumps(alert_policy)) result['status'] = response.status_code if response.status_code != 201: logger.error("Error creating : " + policy_name) @@ -225,42 +207,42 @@ def create_alert_policy(api_key, source_policy): return result -def delete_policy(api_key, policy_id): - delete_url = DEL_ALERTS_URL + str(policy_id) + '.json' +def delete_policy(api_key, policy_id, region=Endpoints.REGION_US): + delete_url = Endpoints.of(region).DEL_ALERTS_URL + str(policy_id) + '.json' result = requests.delete(delete_url, headers=setup_headers(api_key)) logger.info(result.url) return result -def delete_channel(api_key, channel_id): - delete_url = DEL_CHANNELS_URL + str(channel_id) + '.json' +def delete_channel(api_key, channel_id, region=Endpoints.REGION_US): + delete_url = Endpoints.of(region).DEL_CHANNELS_URL + str(channel_id) + '.json' result = requests.delete(delete_url, headers=setup_headers(api_key)) logger.info(result.url) return result -def delete_all_policies(api_key, account_id): +def delete_all_policies(api_key, account_id, region=Endpoints.REGION_US): logger.warn('Deleting all alert policies for account ' + str(account_id)) - result = get_all_alert_policies(api_key) + result = get_all_alert_policies(api_key, region) if result['response_count'] > 0: for policy in result[POLICIES]: logger.info('Deleting ' + policy['name']) - result = delete_policy(api_key, policy['id']) + result = delete_policy(api_key, policy['id'], region) logger.info('Delete status ' + str(result.status_code)) -def delete_all_channels(api_key, account_id): +def delete_all_channels(api_key, account_id, region=Endpoints.REGION_US): logger.warn('Deleting all notification channels for account ' + str(account_id)) - result = get_channels(api_key) + result = get_channels(api_key, region) if result['response_count'] > 0: for channel in result[CHANNELS]: logger.info('Deleting ' + channel['name']) - result = delete_channel(api_key, channel['id']) + result = delete_channel(api_key, channel['id'], region) logger.info('Delete status ' + str(result.status_code)) -def create_synthetic_condition(api_key, alert_policy, synth_condition, monitor_name): - create_condition_url = CREATE_SYNTHETICS_CONDITION_URL + str(alert_policy['id']) + '.json' +def create_synthetic_condition(api_key, alert_policy, synth_condition, monitor_name, 
region=Endpoints.REGION_US): + create_condition_url = Endpoints.of(region).CREATE_SYNTHETICS_CONDITION_URL + str(alert_policy['id']) + '.json' payload = {SYNTH_CONDITION: synth_condition} response = requests.post(create_condition_url, headers=setup_headers(api_key), data=json.dumps(payload)) @@ -273,8 +255,8 @@ def create_synthetic_condition(api_key, alert_policy, synth_condition, monitor_n return result -def create_loc_failure_condition(api_key, alert_policy, loc_failure_condition): - create_condition_url = LOC_FAILURE_CONDITIONS_URL + str(alert_policy['id']) + '.json' +def create_loc_failure_condition(api_key, alert_policy, loc_failure_condition, region=Endpoints.REGION_US): + create_condition_url = Endpoints.of(region).LOC_FAILURE_CONDITIONS_URL + str(alert_policy['id']) + '.json' payload = {LOCATION_FAILURE_CONDITION: loc_failure_condition} response = requests.post(create_condition_url, headers=setup_headers(api_key), data=json.dumps(payload)) @@ -287,8 +269,8 @@ def create_loc_failure_condition(api_key, alert_policy, loc_failure_condition): return result -def create_app_condition(api_key, alert_policy, app_condition): - return create_alert_condition(api_key, CREATE_APP_CONDITION_URL, CONDITION, alert_policy, app_condition) +def create_app_condition(api_key, alert_policy, app_condition, region=Endpoints.REGION_US): + return create_alert_condition(api_key, Endpoints.of(region).CREATE_APP_CONDITION_URL, CONDITION, alert_policy, app_condition) def create_alert_condition(api_key, create_url, cond_key, alert_policy, condition): @@ -306,13 +288,14 @@ def create_alert_condition(api_key, create_url, cond_key, alert_policy, conditio return result -def create_extsvc_condition(api_key, alert_policy, condition): - return create_alert_condition(api_key, CREATE_EXTSVC_CONDITION_URL, EXTSVC_CONDITION, alert_policy, condition) +def create_extsvc_condition(api_key, alert_policy, condition, region=Endpoints.REGION_US): + return create_alert_condition(api_key, Endpoints.of(region).CREATE_EXTSVC_CONDITION_URL, EXTSVC_CONDITION, alert_policy, condition) + -def create_infra_condition(api_key, alert_policy, condition): +def create_infra_condition(api_key, alert_policy, condition, region=Endpoints.REGION_US): payload = {INFRA_CONDITION: condition} result = {} - response = requests.post(CREATE_INFRA_CONDITION_URL, headers=setup_headers(api_key), + response = requests.post(Endpoints.of(region).CREATE_INFRA_CONDITION_URL, headers=setup_headers(api_key), data=json.dumps(payload)) result['status'] = response.status_code if response.status_code != 201: @@ -322,24 +305,25 @@ def create_infra_condition(api_key, alert_policy, condition): " : " + condition['name'] + ":" + str(response.status_code) + " : " + response.text) return result -def delete_condition(api_key, alert_policy, app_condition): - delete_url = APP_CONDITIONS_URL + str(app_condition['id']) + '.json' + +def delete_condition(api_key, alert_policy, app_condition, region=Endpoints.REGION_US): + delete_url = Endpoints.of(region).APP_CONDITIONS_URL + str(app_condition['id']) + '.json' result = requests.delete(delete_url, headers=setup_headers(api_key)) logger.info('Delete status for ' + alert_policy['name'] + ':' + app_condition['name'] + str(result.status_code)) -def synth_conditions_by_name_monitor(api_key, policy_id): +def synth_conditions_by_name_monitor(api_key, policy_id, region=Endpoints.REGION_US): conditions_by_name_monitor = {} - synth_conditions = get_synthetic_conditions(api_key, policy_id)[SYNTH_CONDITIONS] + synth_conditions = 
get_synthetic_conditions(api_key, policy_id, region)[SYNTH_CONDITIONS] for synth_condition in synth_conditions: if synth_condition[MONITOR_ID]: conditions_by_name_monitor[synth_condition['name'] + synth_condition[MONITOR_ID]] = synth_condition return conditions_by_name_monitor -def loc_conditions_by_name_monitor(api_key, policy_id): +def loc_conditions_by_name_monitor(api_key, policy_id, region=Endpoints.REGION_US): conditions_by_name_monitor = {} - loc_conditions = get_location_failure_conditions(api_key, policy_id)[LOCATION_FAILURE_CONDITIONS] + loc_conditions = get_location_failure_conditions(api_key, policy_id, region)[LOCATION_FAILURE_CONDITIONS] for loc_condition in loc_conditions: for entity_id in loc_condition['entities']: conditions_by_name_monitor[loc_condition['name'] + entity_id] = loc_condition @@ -354,13 +338,15 @@ def app_conditions_by_name_entity(api_key, policy_id): conditions_by_name_entity[app_condition['name'] + str(entity_id)] = app_condition return conditions_by_name_entity -def infra_conditions_by_name(api_key, policy_id): + +def infra_conditions_by_name(api_key, policy_id, region): conditions_by_name = {} - infra_conditions = get_infra_conditions(api_key, policy_id)[INFRA_CONDITIONS] + infra_conditions = get_infra_conditions(api_key, policy_id, region)[INFRA_CONDITIONS] for infra_condition in infra_conditions: conditions_by_name[infra_condition['name']] = infra_condition return conditions_by_name + def get_alert_status_file_name(fromFile, fromFileEntities, src_account_id, tgt_account_id): status_file_name = str(src_account_id) + '_' if fromFile: @@ -369,17 +355,15 @@ def get_alert_status_file_name(fromFile, fromFileEntities, src_account_id, tgt_a status_file_name += utils.file_name_from(fromFileEntities) + '_' return status_file_name + str(tgt_account_id) + '_conditions.csv' + def get_policy_entity_map(api_key, alert_policies): entities_by_policy = {} policies_by_entity = {} - for policy in alert_policies: policy_id = policy['id'] policy_name = policy['name'] apps = [] - logger.info('Loading app entity conditions for policy ID %d...' 
% policy_id) - conditions = get_app_conditions(api_key, policy_id) if not 'response_count' in conditions or conditions['response_count'] == 0: logger.info('No app entity conditions found for policy ID %d' % policy_id) @@ -407,20 +391,19 @@ def get_policy_entity_map(api_key, alert_policies): 'policies_by_entity': policies_by_entity } -def get_policy_names_by_entities(entity_names, account_id, api_key, use_local): - names = [] +def get_policy_names_by_entities(entity_names, account_id, api_key, use_local, region=Endpoints.REGION_US): + names = [] if use_local: alert_policy_entity_map = store.load_alert_policy_entity_map(account_id) else: - alert_policies = get_all_alert_policies(api_key) + alert_policies = get_all_alert_policies(api_key, region) alert_policy_entity_map = get_policy_entity_map(api_key, alert_policies['policies']) policies_by_entity = alert_policy_entity_map['policies_by_entity'] for entity_name in entity_names: entity_id = None - if entity_name.isnumeric(): entity_id = entity_name else: @@ -429,7 +412,7 @@ def get_policy_names_by_entities(entity_names, account_id, api_key, use_local): if match: entity_type = match.group(1) entity_name = match.group(2) - result = ec.get_entity_by_name(api_key, account_id, entity_type, entity_name) + result = ec.get_entity_by_name(api_key, account_id, entity_type, entity_name, region) if not result['entityFound']: continue entity = result['entity'] diff --git a/library/clients/dbentityclient.py b/library/clients/dbentityclient.py new file mode 100644 index 0000000..604ecca --- /dev/null +++ b/library/clients/dbentityclient.py @@ -0,0 +1,108 @@ +import requests +import json +import os +import library.utils as utils +import library.nrpylogger as nrpy_logger +import library.clients.gql as nerdgraph + +logger = nrpy_logger.get_logger(os.path.basename(__file__)) + + +class DashboardEntity: + + def __init__(self): + pass + + @staticmethod + def get(user_api_key, guid): + payload = DashboardEntity._get_dashboard_payload(guid) + logger.debug(json.dumps(payload)) + return nerdgraph.GraphQl.post(user_api_key, payload) + + @staticmethod + def get_pages_widgets(user_api_key, guid): + payload = DashboardEntity._get_pages_widgets_payload(guid) + logger.debug(json.dumps(payload)) + return nerdgraph.GraphQl.post(user_api_key, payload) + + @staticmethod + def create(user_api_key, account_id, dashboard): + payload = DashboardEntity._create_dashboard_payload(account_id, dashboard) + logger.debug(json.dumps(payload)) + return nerdgraph.GraphQl.post(user_api_key, payload) + + @staticmethod + def update_page_widgets(user_api_key, page_guid, widgets): + mutation_query = '''mutation($guid: EntityGuid!,$widgets: [DashboardUpdateWidgetInput!]!) { + dashboardUpdateWidgetsInPage(guid: $guid, widgets: $widgets) { + errors { description type } + } + }''' + payload = {'query': mutation_query, 'variables': {'guid': page_guid, 'widgets': widgets}} + return nerdgraph.GraphQl.post(user_api_key, payload) + + @staticmethod + def _create_dashboard_payload(account_id, dashboard): + mutation_query = '''mutation($accountId: Int!, $dashboard: DashboardInput!) { + dashboardCreate(accountId: $accountId , dashboard: $dashboard) { + entityResult { guid name } + errors { description } + } + }''' + return {'query': mutation_query, 'variables': {'accountId': account_id, 'dashboard': dashboard}} + + + @staticmethod + def _get_dashboard_payload(guid): + dashboard_query = '''query($guid: EntityGuid!) { + actor { + entity(guid: $guid) { + ... 
on DashboardEntity { + name + permissions + pages { + name + widgets { + visualization { id } + title + layout { row width height column } + rawConfiguration + linkedEntities { + accountId + entityType + name + guid + } + } + } + } + } + } + }''' + variables = {'guid': guid} + return {'query': dashboard_query, 'variables': variables} + + @staticmethod + def _get_pages_widgets_payload(guid): + dashboard_query = '''query($guid: EntityGuid!) { + actor { + entity(guid: $guid) { + ... on DashboardEntity { + name + pages { + name + guid + widgets { + id + title + visualization { id } + layout { row width height column } + rawConfiguration + } + } + } + } + } + }''' + variables = {'guid': guid} + return {'query': dashboard_query, 'variables': variables} diff --git a/library/clients/endpoints.py b/library/clients/endpoints.py new file mode 100644 index 0000000..28f9718 --- /dev/null +++ b/library/clients/endpoints.py @@ -0,0 +1,90 @@ +import os +import library.migrationlogger as m_logger + + +class Endpoints: + logger = m_logger.get_logger(os.path.basename(__file__)) + REGION_US = "us" + REGION_EU = "eu" + + @classmethod + def of(cls, region=REGION_US): + if region == cls.REGION_US: + return USEndpoints() + elif region == cls.REGION_EU: + return EUEndpoints() + else: + cls.logger.error("Incorrect region specified. Region can be either us or eu") + + +class USEndpoints: + + GRAPHQL_URL = 'https://api.newrelic.com/graphql' + SHOW_APM_APP_URL = 'https://api.newrelic.com/v2/applications/' + GET_APM_APP_URL = 'https://api.newrelic.com/v2/applications.json' + GET_BROWSER_APP_URL = 'https://api.newrelic.com/v2/browser_applications.json' + SHOW_MOBILE_APP_URL = 'https://api.newrelic.com/v2/mobile_applications/' + SHOW_APM_KT_URL = 'https://api.newrelic.com/v2/key_transactions/' + GET_APM_KT_URL = 'https://api.newrelic.com/v2/key_transactions.json' + PUT_LABEL_URL = 'https://api.newrelic.com/v2/labels.json' + GET_DASHBOARDS_URL = 'https://api.newrelic.com/v2/dashboards.json' + SHOW_DASHBOARDS_URL = 'https://api.newrelic.com/v2/dashboards/' + DEL_DASHBOARDS_URL = 'https://api.newrelic.com/v2/dashboards/' + MONITORS_URL = 'https://synthetics.newrelic.com/synthetics/api/v3/monitors/' + MONITORS_LABEL_URL = 'https://synthetics.newrelic.com/synthetics/api/v4/monitors/' + INSIGHTS_URL = 'https://insights-api.newrelic.com/v1/accounts/%s/query' + SEC_CREDENTIALS_URL = 'https://synthetics.newrelic.com/synthetics/api/v1/secure-credentials' + ALERTS_CHANNEL_URL = 'https://api.newrelic.com/v2/alerts_channels.json' + ALERT_POLICIES_URL = 'https://api.newrelic.com/v2/alerts_policies.json' + ALERT_POLICY_CHANNELS_URL = 'https://api.newrelic.com/v2/alerts_policy_channels.json' + DEL_ALERTS_URL = 'https://api.newrelic.com/v2/alerts_policies/' + DEL_CHANNELS_URL = 'https://api.newrelic.com/v2/alerts_channels/' + GET_APP_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_conditions.json' + APP_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_conditions/' + CREATE_APP_CONDITION_URL = 'https://api.newrelic.com/v2/alerts_conditions/policies/' + GET_SYNTH_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_synthetics_conditions.json' + CREATE_SYNTHETICS_CONDITION_URL = 'https://api.newrelic.com/v2/alerts_synthetics_conditions/policies/' + LOC_FAILURE_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_location_failure_conditions/policies/' + NRQL_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_nrql_conditions.json' + CREATE_NRQL_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_nrql_conditions/policies/' + 
EXTSVC_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_external_service_conditions.json' + CREATE_EXTSVC_CONDITION_URL = 'https://api.newrelic.com/v2/alerts_external_service_conditions/policies/' + INFRA_CONDITIONS_URL = 'https://infra-api.newrelic.com/v2/alerts/conditions' + CREATE_INFRA_CONDITION_URL = 'https://infra-api.newrelic.com/v2/alerts/conditions' + ENTITY_CONDITIONS_URL = 'https://api.newrelic.com/v2/alerts_entity_conditions' + +class EUEndpoints: + + GRAPHQL_URL = 'https://api.eu.newrelic.com/graphql' + SHOW_APM_APP_URL = 'https://api.eu.newrelic.com/v2/applications/' + GET_APM_APP_URL = 'https://api.eu.newrelic.com/v2/applications.json' + GET_BROWSER_APP_URL = 'https://api.eu.newrelic.com/v2/browser_applications.json' + SHOW_MOBILE_APP_URL = 'https://api.eu.newrelic.com/v2/mobile_applications/' + SHOW_APM_KT_URL = 'https://api.eu.newrelic.com/v2/key_transactions/' + GET_APM_KT_URL = 'https://api.eu.newrelic.com/v2/key_transactions.json' + PUT_LABEL_URL = 'https://api.eu.newrelic.com/v2/labels.json' + GET_DASHBOARDS_URL = 'https://api.eu.newrelic.com/v2/dashboards.json' + SHOW_DASHBOARDS_URL = 'https://api.eu.newrelic.com/v2/dashboards/' + DEL_DASHBOARDS_URL = 'https://api.eu.newrelic.com/v2/dashboards/' + MONITORS_URL = 'https://synthetics.eu.newrelic.com/synthetics/api/v3/monitors/' + MONITORS_LABEL_URL = 'https://synthetics.eu.newrelic.com/synthetics/api/v4/monitors/' + INSIGHTS_URL = 'https://insights-api.eu.newrelic.com/v1/accounts/%s/query' + SEC_CREDENTIALS_URL = 'https://synthetics.eu.newrelic.com/synthetics/api/v1/secure-credentials' + ALERTS_CHANNEL_URL = 'https://api.eu.newrelic.com/v2/alerts_channels.json' + ALERT_POLICIES_URL = 'https://api.eu.newrelic.com/v2/alerts_policies.json' + ALERT_POLICY_CHANNELS_URL = 'https://api.eu.newrelic.com/v2/alerts_policy_channels.json' + DEL_ALERTS_URL = 'https://api.eu.newrelic.com/v2/alerts_policies/' + DEL_CHANNELS_URL = 'https://api.eu.newrelic.com/v2/alerts_channels/' + GET_APP_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_conditions.json' + APP_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_conditions/' + CREATE_APP_CONDITION_URL = 'https://api.eu.newrelic.com/v2/alerts_conditions/policies/' + GET_SYNTH_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_synthetics_conditions.json' + CREATE_SYNTHETICS_CONDITION_URL = 'https://api.eu.newrelic.com/v2/alerts_synthetics_conditions/policies/' + LOC_FAILURE_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_location_failure_conditions/policies/' + NRQL_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_nrql_conditions.json' + CREATE_NRQL_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_nrql_conditions/policies/' + EXTSVC_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_external_service_conditions.json' + CREATE_EXTSVC_CONDITION_URL = 'https://api.eu.newrelic.com/v2/alerts_external_service_conditions/policies/' + INFRA_CONDITIONS_URL = 'https://infra-api.eu.newrelic.com/v2/alerts/conditions' + CREATE_INFRA_CONDITION_URL = 'https://infra-api.eu.newrelic.com/v2/alerts/conditions' + ENTITY_CONDITIONS_URL = 'https://api.eu.newrelic.com/v2/alerts_entity_conditions' diff --git a/library/clients/entityclient.py b/library/clients/entityclient.py index 7ad33b6..ab2e7d3 100644 --- a/library/clients/entityclient.py +++ b/library/clients/entityclient.py @@ -2,9 +2,9 @@ import json import os import library.migrationlogger as m_logger -import library.utils as utils import collections import logging +from library.clients.endpoints import 
Endpoints APM_APP = 'APM_APP' APM_KT = 'APM_KT' @@ -29,21 +29,10 @@ ent_type_lookup[INFRA_HOST] = 'INFRASTRUCTURE_HOST_ENTITY' ent_type_lookup[INFRA_INT] = 'GENERIC_INFRASTRUCTURE_ENTITY' ent_type_lookup[INFRA_LAMBDA] = 'INFRASTRUCTURE_AWS_LAMBDA_FUNCTION_ENTITY' - - -GRAPHQL_URL = 'https://api.newrelic.com/graphql' -SHOW_APM_APP_URL = 'https://api.newrelic.com/v2/applications/' -GET_APM_APP_URL = 'https://api.newrelic.com/v2/applications.json' -GET_BROWSER_APP_URL = 'https://api.newrelic.com/v2/browser_applications.json' -SHOW_MOBILE_APP_URL = 'https://api.newrelic.com/v2/mobile_applications/' -SHOW_APM_KT_URL = 'https://api.newrelic.com/v2/key_transactions/' -GET_APM_KT_URL = 'https://api.newrelic.com/v2/key_transactions.json' KEY_TRANSACTIONS = 'key_transactions' -PUT_LABEL_URL = 'https://api.newrelic.com/v2/labels.json' -GET_DASHBOARDS_URL = 'https://api.newrelic.com/v2/dashboards.json' DASHBOARDS = 'dashboards' -SHOW_DASHBOARDS_URL = 'https://api.newrelic.com/v2/dashboards/' -DEL_DASHBOARDS_URL = 'https://api.newrelic.com/v2/dashboards/' + +DEFAULT_REGION = Endpoints.REGION_US logger = m_logger.get_logger(os.path.basename(__file__)) @@ -240,7 +229,6 @@ def matched_browser_app(entity, tgt_account_id, src_entity): return matched - def matched_entity_name(entity_type, entity, tgt_account_id, name): matched = False if entity['entityType'] == ent_type_lookup[entity_type] and \ @@ -249,10 +237,12 @@ def matched_entity_name(entity_type, entity, tgt_account_id, name): matched = True return matched -def get_matching_kt(tgt_api_key, kt_name): + +def get_matching_kt(tgt_api_key, kt_name, region): filter_params = {'filter[name]': kt_name} result = {'entityFound': False} - response = requests.get(GET_APM_KT_URL, headers=rest_api_headers(tgt_api_key), params=filter_params) + response = requests.get(Endpoints.of(region).GET_APM_KT_URL, headers=rest_api_headers(tgt_api_key), + params=filter_params) result['status'] = response.status_code if response.text: response_json = response.json() @@ -268,11 +258,11 @@ def extract_entities(gql_rsp_json): return list(filter(None, rsp_entities)) # remove empty dicts from list -def gql_get_matching_entity(api_key, entity_type, src_entity, tgt_account_id): +def gql_get_matching_entity(api_key, entity_type, src_entity, tgt_account_id, region=DEFAULT_REGION): logger.info('looking for matching entity ' + src_entity['name'] + ' in account ' + tgt_account_id) payload = search_query_payload(entity_type, src_entity['name'], tgt_account_id) result = {'entityFound': False} - response = requests.post(GRAPHQL_URL, headers=gql_headers(api_key), data=json.dumps(payload)) + response = requests.post(Endpoints.of(region).GRAPHQL_URL, headers=gql_headers(api_key), data=json.dumps(payload)) result['status'] = response.status_code if response.text: response_json = response.json() @@ -307,11 +297,11 @@ def set_matched_entity(entities, entity_type, result, src_entity, tgt_account_id break -def gql_get_matching_entity_by_name(api_key, entity_type, name, tgt_acct_id): +def gql_get_matching_entity_by_name(api_key, entity_type, name, tgt_acct_id, region=DEFAULT_REGION): logger.info('Searching matching entity for type:' + entity_type + ', name:' + name + ', acct:' + str(tgt_acct_id)) payload = search_query_payload(entity_type, name, tgt_acct_id) result = {'entityFound': False} - response = requests.post(GRAPHQL_URL, headers=gql_headers(api_key), data=json.dumps(payload)) + response = requests.post(Endpoints.of(region).GRAPHQL_URL, headers=gql_headers(api_key), data=json.dumps(payload)) 
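Every entity-client helper above now threads an optional `region` argument (defaulting to `DEFAULT_REGION`, i.e. `us`) and resolves its base URL per call via `Endpoints.of(region)`. One caveat: `Endpoints.of()` only logs an error for an unrecognized region and implicitly returns `None`, so a bad value surfaces later as an `AttributeError`. A minimal usage sketch of the factory, assuming only the classes added in `library/clients/endpoints.py` above:

```python
from library.clients.endpoints import Endpoints

# Resolve the per-region endpoint table once and reuse it; Endpoints.of()
# returns USEndpoints() or EUEndpoints(), or None for an unknown region.
endpoints = Endpoints.of('eu')
if endpoints is None:
    # of() only logs on an unknown region; guard before dereferencing.
    raise ValueError("region must be 'us' or 'eu'")

print(endpoints.GRAPHQL_URL)   # https://api.eu.newrelic.com/graphql
print(endpoints.MONITORS_URL)  # https://synthetics.eu.newrelic.com/synthetics/api/v3/monitors/
```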
result['status'] = response.status_code if response.text: response_json = response.json() @@ -376,9 +366,12 @@ def get_entities_payload(entity_type, acct_id = None, nextCursor = None, tag_nam payload = {'query': entity_search_query, 'variables': variables} return payload -def gql_get_entities_by_type(api_key, entity_type, acct_id = None, tag_name = None, tag_value = None): - logger.info('Searching for entities by type:' + entity_type + ', acct:' + str(acct_id or 'not provided') + ', tag name: ' + str(tag_name or 'not provided') + ', tag value: ' + str(tag_value or 'not provided')) - + +def gql_get_entities_by_type(api_key, entity_type, acct_id=None, tag_name=None, tag_value=None, region=DEFAULT_REGION): + logger.info('Searching for entities by type:' + entity_type + ', acct:' + str(acct_id or 'not provided') + + ', tag name: ' + str(tag_name or 'not provided') + ', tag value: ' + str(tag_value or 'not provided') + + ', region: ' + region) + logger.info('Will use GRAPHQL_URL : ' + Endpoints.of(region).GRAPHQL_URL) done = False nextCursor = None count = 0 @@ -389,7 +382,7 @@ def gql_get_entities_by_type(api_key, entity_type, acct_id = None, tag_name = No while not done: payload = get_entities_payload(entity_type, acct_id, nextCursor, tag_name, tag_value) - response = requests.post(GRAPHQL_URL, headers=gql_headers(api_key), data=json.dumps(payload)) + response = requests.post(Endpoints.of(region).GRAPHQL_URL, headers=gql_headers(api_key), data=json.dumps(payload)) if response.status_code != 200: done = True if response.text: @@ -401,7 +394,7 @@ def gql_get_entities_by_type(api_key, entity_type, acct_id = None, tag_name = No if not response.text: done = True break - + logger.info(response.text) response_json = response.json() if 'errors' in response_json: done = True @@ -430,11 +423,12 @@ def gql_get_entities_by_type(api_key, entity_type, acct_id = None, tag_name = No return result -def gql(api_key, payload): + +def gql(api_key, payload, region=DEFAULT_REGION): if logger.isEnabledFor(logging.DEBUG): logger.debug(json.dumps(payload, indent=2)) - response = requests.post(GRAPHQL_URL, headers=gql_headers(api_key), data=json.dumps(payload)) + response = requests.post(Endpoints.of(region).GRAPHQL_URL, headers=gql_headers(api_key), data=json.dumps(payload)) if response.status_code != 200: logger.error('HTTP error fetching entities: %d: %s' % ( response.status_code, response.text() @@ -474,34 +468,33 @@ def gql(api_key, payload): 'data': response_json['data'] } -def gql_get_paginated_results(api_key, payload_builder, payload_processor): + +def gql_get_paginated_results(api_key, payload_builder, payload_processor, region): done = False nextCursor = None - while not done: - gql_result = gql(api_key, payload_builder(nextCursor)) + gql_result = gql(api_key, payload_builder(nextCursor), region) if gql_result['error']: return gql_result['error'] - nextCursor = payload_processor(gql_result['data']) if not nextCursor: done = True - return None -def show_url_for_app(entity_type, app_id): + +def show_url_for_app(entity_type, app_id, region=Endpoints.REGION_US): if MOBILE_APP == entity_type: - show_url = SHOW_MOBILE_APP_URL + show_url = Endpoints.of(region).SHOW_MOBILE_APP_URL if APM_APP == entity_type: - show_url = SHOW_APM_APP_URL + show_url = Endpoints.of(region).SHOW_APM_APP_URL if show_url: return show_url + app_id + '.json' logger.error('Only supported for ' + MOBILE_APP + ' and ' + APM_APP) -def get_app_entity(api_key, entity_type, app_id): +def get_app_entity(api_key, entity_type, app_id, 
region=Endpoints.REGION_US): result = {'entityFound': False} - get_url = show_url_for_app(entity_type, app_id) + get_url = show_url_for_app(entity_type, app_id, region) response = requests.get(get_url, headers=rest_api_headers(api_key)) result['status'] = response.status_code if response.status_code != 200: @@ -514,10 +507,10 @@ def get_app_entity(api_key, entity_type, app_id): return result -def get_apm_entity_by_name(api_key, app_name): +def get_apm_entity_by_name(api_key, app_name, region=Endpoints.REGION_US): params = {'filter[name]': app_name} result = {'entityFound': False} - response = requests.get(GET_APM_APP_URL, headers=rest_api_headers(api_key), params=params) + response = requests.get(Endpoints.of(region).GET_APM_APP_URL, headers=rest_api_headers(api_key), params=params) result['status'] = response.status_code if response.status_code != 200: if response.text: @@ -534,10 +527,10 @@ def get_apm_entity_by_name(api_key, app_name): return result -def get_browser_entity(api_key, app_id): +def get_browser_entity(api_key, app_id, region=Endpoints.REGION_US): params = {'filter[ids]': [app_id]} result = {'entityFound': False} - get_url = GET_BROWSER_APP_URL + get_url = Endpoints.of(region).GET_BROWSER_APP_URL response = requests.get(get_url, headers=rest_api_headers(api_key), params=params) logger.info(response.url) result['status'] = response.status_code @@ -558,9 +551,9 @@ def get_browser_entity(api_key, app_id): return result -def get_apm_kt(api_key, kt_id): +def get_apm_kt(api_key, kt_id, region=Endpoints.REGION_US): result = {'entityFound': False} - get_url = SHOW_APM_KT_URL + kt_id + '.json' + get_url = Endpoints.of(region).SHOW_APM_KT_URL + kt_id + '.json' response = requests.get(get_url, headers=rest_api_headers(api_key)) result['status'] = response.status_code if response.status_code != 200: @@ -573,30 +566,33 @@ def get_apm_kt(api_key, kt_id): return result -def get_entity(api_key, entity_type, entity_id): +def get_entity(api_key, entity_type, entity_id, region=Endpoints.REGION_US): if entity_type in [APM_APP, MOBILE_APP]: - return get_app_entity(api_key, entity_type, entity_id) + return get_app_entity(api_key, entity_type, entity_id, region) if entity_type == BROWSER_APP: - return get_browser_entity(api_key, entity_id) + return get_browser_entity(api_key, entity_id, region) if entity_type == APM_KT: - return get_apm_kt(api_key, entity_id) + return get_apm_kt(api_key, entity_id, region) logger.warn('Skipping non APM entities ' + entity_type) return {'entityFound': False} -def get_entity_by_name(api_key, acct_id, entity_type, entity_name): + +def get_entity_by_name(api_key, acct_id, entity_type, entity_name, region=Endpoints.REGION_US): logger.info('Searching matching entity for type:' + entity_type + ', name:' + entity_name + ', acct:' + str(acct_id)) if entity_type == APM_KT: - return get_matching_kt(api_key, entity_name) + return get_matching_kt(api_key, entity_name, region) return gql_get_matching_entity_by_name(api_key, entity_type, entity_name, acct_id) + # didn't end up using this as it was returning 500 errors sporadically in my test account # see gql_mutate_add_tag instead -def put_apm_label(api_key, category, name, applications): +def put_apm_label(api_key, category, name, applications, region=Endpoints.REGION_US): label_payload = {'label': {'category': category, 'name': name, 'links': {'applications': applications}}} result = {} - response = requests.put(PUT_LABEL_URL, headers=rest_api_headers(api_key), data=json.dumps(label_payload)) + response = 
requests.put(Endpoints.of(region).PUT_LABEL_URL, headers=rest_api_headers(api_key), + data=json.dumps(label_payload)) result['status'] = response.status_code if response.status_code in [200, 204] and response.text: result['label'] = response.json()['label'] @@ -605,7 +601,7 @@ def put_apm_label(api_key, category, name, applications): return result -def put_apm_settings(api_key, app_id, app_settings): +def put_apm_settings(api_key, app_id, app_settings, region=Endpoints.REGION_US): logger.debug(app_settings) updated_settings = { "application": { @@ -617,7 +613,7 @@ def put_apm_settings(api_key, app_id, app_settings): } } result = {} - update_app_url = SHOW_APM_APP_URL + str(app_id) + '.json' + update_app_url = Endpoints.of(region).SHOW_APM_APP_URL + str(app_id) + '.json' response = requests.put(update_app_url, headers=rest_api_headers(api_key), data=json.dumps(updated_settings)) result['status'] = response.status_code if response.status_code in [200, 204] and response.text: @@ -626,6 +622,7 @@ def put_apm_settings(api_key, app_id, app_settings): result['error'] = response.text return result + # Input: tags from source and target entities # Output: array of tags that need to be applied on target entity def tags_diff(src_tags, tgt_tags): @@ -662,10 +659,10 @@ def replace_tags_payload(entity_guid, arr_tags): return mutate_tags_payload(entity_guid, arr_tags, 'taggingReplaceTagsOnEntity') -def gql_mutate_add_tags(per_api_key, entity_guid, arr_tags): +def gql_mutate_add_tags(per_api_key, entity_guid, arr_tags, region=DEFAULT_REGION): payload = apply_tags_payload(entity_guid, arr_tags) result = {} - response = requests.post(GRAPHQL_URL, headers=gql_headers(per_api_key), data=json.dumps(payload)) + response = requests.post(Endpoints.of(region).GRAPHQL_URL, headers=gql_headers(per_api_key), data=json.dumps(payload)) result['status'] = response.status_code if response.text: response_json = response.json() @@ -678,10 +675,10 @@ def gql_mutate_add_tags(per_api_key, entity_guid, arr_tags): return result -def gql_mutate_replace_tags(per_api_key, entity_guid, tags): +def gql_mutate_replace_tags(per_api_key, entity_guid, tags, region=DEFAULT_REGION): payload = replace_tags_payload(entity_guid, tags) result = {} - response = requests.post(GRAPHQL_URL, headers=gql_headers(per_api_key), data=json.dumps(payload)) + response = requests.post(Endpoints.of(region).GRAPHQL_URL, headers=gql_headers(per_api_key), data=json.dumps(payload)) result['status'] = response.status_code if response.text: response_json = response.json() @@ -694,11 +691,10 @@ def gql_mutate_replace_tags(per_api_key, entity_guid, tags): return result -def get_dashboard_definition(per_api_key, name, acct_id): - result = gql_get_matching_entity_by_name(per_api_key, DASHBOARD, name, acct_id) +def get_dashboard_definition(per_api_key, name, acct_id, region=Endpoints.REGION_US): + result = gql_get_matching_entity_by_name(per_api_key, DASHBOARD, name, acct_id, region) if not result['entityFound']: return None - return result['entity'] def dashboard_query_payload(dashboard_guid): @@ -728,10 +724,11 @@ def dashboard_query_payload(dashboard_guid): payload = {'query': dashboard_query, 'variables': variables} return payload -def get_dashboard_widgets(per_api_key, dashboard_guid): + +def get_dashboard_widgets(per_api_key, dashboard_guid, region=DEFAULT_REGION): result = {'entityFound': False} payload = dashboard_query_payload(dashboard_guid) - response = requests.post(GRAPHQL_URL, headers=gql_headers(per_api_key), data=json.dumps(payload)) + response = 
requests.post(Endpoints.of(region).GRAPHQL_URL, headers=gql_headers(per_api_key), data=json.dumps(payload)) result['status'] = response.status_code if response.status_code != 200: if response.text: @@ -770,9 +767,10 @@ def create_dashboard_payload(acct_id, dashboard): payload = {'query': create_dashboard_query, 'variables': variables} return payload -def post_dashboard(per_api_key, dashboard, acct_id): + +def post_dashboard(per_api_key, dashboard, acct_id, region=DEFAULT_REGION): payload = create_dashboard_payload(acct_id, dashboard) - response = requests.post(GRAPHQL_URL, headers=gql_headers(per_api_key), data=json.dumps(payload)) + response = requests.post(Endpoints.of(region).GRAPHQL_URL, headers=gql_headers(per_api_key), data=json.dumps(payload)) result = {'status': response.status_code} if response.status_code != 200 and response.status_code != 201: @@ -811,9 +809,10 @@ def delete_dashboard_payload(guid): payload = {'query': delete_dashboard_query, 'variables': variables} return payload -def delete_dashboard(per_api_key, guid): + +def delete_dashboard(per_api_key, guid, region=DEFAULT_REGION): payload = delete_dashboard_payload(guid) - response = requests.post(GRAPHQL_URL, headers=gql_headers(per_api_key), data=json.dumps(payload)) + response = requests.post(Endpoints.of(region).GRAPHQL_URL, headers=gql_headers(per_api_key), data=json.dumps(payload)) result = {'status': response.status_code} if response.status_code != 200: @@ -838,6 +837,7 @@ def delete_dashboard(per_api_key, guid): return result + def delete_dashboards(per_api_key, dashboard_names, acct_id): for dashboard_name in dashboard_names: result = get_dashboard_definition(per_api_key, dashboard_name, acct_id) @@ -845,23 +845,20 @@ def delete_dashboards(per_api_key, dashboard_names, acct_id): delete_dashboard(per_api_key, result['guid']) -def delete_all_dashboards(per_api_key, acct_id): - result = gql_get_entities_by_type(per_api_key, DASHBOARD, acct_id) +def delete_all_dashboards(per_api_key, acct_id, region): + result = gql_get_entities_by_type(per_api_key, DASHBOARD, acct_id, None, None, region) if 'error' in result: logger.error('Error : ' + result['error']) return - count = result['count'] - if count <= 0: logger.info('No dashboards to delete') return - logger.info('Deleting ' + str(count) + ' dashboards') - for dashboard in result['entities']: logger.info('Deleting ' + dashboard['name']) - delete_dashboard(per_api_key, dashboard['guid']) + delete_dashboard(per_api_key, dashboard['guid'], region) + def get_nrql_condition_ids_payload(account_id, policy_id, nextCursor = None): cursor = ', cursor: "%s"' % nextCursor if nextCursor else '' @@ -892,7 +889,8 @@ def get_nrql_condition_ids_payload(account_id, policy_id, nextCursor = None): } } -def get_nrql_condition_ids(api_key, account_id, policy_id): + +def get_nrql_condition_ids(api_key, account_id, policy_id, region): ids = [] def build_payload(nextCursor): @@ -902,20 +900,18 @@ def process_payload(data): search = data['actor']['account']['alerts']['nrqlConditionsSearch'] if not search: return None - if 'nrqlConditions' in search: for condition in search['nrqlConditions']: ids.append(condition['id']) - return search['nextCursor'] - - error = gql_get_paginated_results(api_key, build_payload, process_payload) + error = gql_get_paginated_results(api_key, build_payload, process_payload, region) return { 'error': error, 'condition_ids': ids } + def get_nrql_condition_payload(account_id, condition_id): nrql_condition_query = ''' query($accountId: Int!, $conditionId: ID!) 
{ @@ -979,8 +975,9 @@ def get_nrql_condition_payload(account_id, condition_id): } } -def get_nrql_conditions(api_key, account_id, policy_id): - condition_ids = get_nrql_condition_ids(api_key, account_id, policy_id) + +def get_nrql_conditions(api_key, account_id, policy_id, region): + condition_ids = get_nrql_condition_ids(api_key, account_id, policy_id, region) if condition_ids['error']: return { 'error': condition_ids['error'], @@ -991,7 +988,8 @@ def get_nrql_conditions(api_key, account_id, policy_id): for condition_id in condition_ids['condition_ids']: result = gql( api_key, - get_nrql_condition_payload(account_id, condition_id) + get_nrql_condition_payload(account_id, condition_id), + region ) if result['error']: return { @@ -1008,8 +1006,10 @@ def get_nrql_conditions(api_key, account_id, policy_id): 'conditions': conditions } + def create_nrql_condition( api_key, + region, account_id, policy_id, condition, @@ -1039,7 +1039,7 @@ def create_nrql_condition( } } - result = gql(api_key, payload) + result = gql(api_key, payload, region) if result['error']: return { 'error': result['error'], @@ -1052,7 +1052,3 @@ def create_nrql_condition( 'status': result['status'], 'condition_id': result['data'][mutation]['id'] } - - - - diff --git a/library/clients/insightsclient.py b/library/clients/insightsclient.py index 832b1b9..5b951c5 100644 --- a/library/clients/insightsclient.py +++ b/library/clients/insightsclient.py @@ -1,8 +1,9 @@ import os import requests import library.migrationlogger as m_logger +from library.clients.endpoints import Endpoints + -insights_url = 'https://insights-api.newrelic.com/v1/accounts/%s/query' PERF_STATS = 'performanceStats' METADATA = 'metadata' @@ -13,10 +14,10 @@ def setup_headers(api_key): return {'X-Query-Key': api_key, 'Content-Type': 'Application/JSON'} -def execute(insights_query_key, account_id, insights_query): +def execute(insights_query_key, account_id, insights_query, region=Endpoints.REGION_US): log.debug(insights_query) query_params = {'nrql': insights_query} - query_url = insights_url % account_id + query_url = Endpoints.of(region).INSIGHTS_URL % account_id response = requests.get(query_url, headers=setup_headers(insights_query_key), params=query_params) result = {'status': response.status_code} diff --git a/library/clients/monitorsclient.py b/library/clients/monitorsclient.py index 18d0f0a..d5f341e 100644 --- a/library/clients/monitorsclient.py +++ b/library/clients/monitorsclient.py @@ -1,20 +1,20 @@ import json import requests import os -import time import library.migrationlogger as m_logger import library.monitortypes as monitortypes import library.status.monitorstatus as monitorstatus import library.securecredentials as securecredentials import library.clients.entityclient as ec +from library.clients.endpoints import Endpoints +import time # monitors provides REST client calls for fetching a monitor and a monitor script # and populating a monitor_json with it's script # Batch size for fetching monitors, must be less than or equal to 100 BATCH_SIZE = 100 -monitors_url = 'https://synthetics.newrelic.com/synthetics/api/v3/monitors/' -monitor_label_url = 'https://synthetics.newrelic.com/synthetics/api/v4/monitors/' + logger = m_logger.get_logger(os.path.basename(__file__)) NEW_MONITOR_ID = 'new_monitor_id' MON_SEC_CREDENTIALS = 'secureCredentials' @@ -24,8 +24,8 @@ def setup_headers(api_key): return {'Api-Key': api_key, 'Content-Type': 'Application/JSON'} -def fetch_script(api_key, monitor_id): - get_script_url = monitors_url + monitor_id + "/script" 
+def fetch_script(api_key, monitor_id, region=Endpoints.REGION_US): +    get_script_url = Endpoints.of(region).MONITORS_URL + monitor_id + "/script" response = requests.get(get_script_url, headers=setup_headers(api_key)) if response.status_code == 200: body_str = json.loads(response.text) @@ -34,8 +34,8 @@ return {'status': response.status_code, 'body': body_str} -def get_monitor(api_key, monitor_id): -    get_monitor_url = monitors_url + monitor_id +def get_monitor(api_key, monitor_id, region=Endpoints.REGION_US): +    get_monitor_url = Endpoints.of(region).MONITORS_URL + monitor_id response = requests.get(get_monitor_url, headers=setup_headers(api_key)) result = {'status': response.status_code } if response.status_code == 200: @@ -48,13 +48,13 @@ return result -def fetch_all_monitors(api_key): +def fetch_all_monitors(api_key, region=Endpoints.REGION_US): query_params = {'offset': 0, 'limit': BATCH_SIZE} fetch_more = True all_monitors_def_json = [] logger.info("Fetching all monitor definitions with query_params " + str(query_params)) while fetch_more: -        response = requests.get(monitors_url, headers=setup_headers(api_key), params=query_params) +        response = requests.get(Endpoints.of(region).MONITORS_URL, headers=setup_headers(api_key), params=query_params) response_json = json.loads(response.text) monitors_returned = response_json['count'] if monitors_returned == 0 or monitors_returned < query_params['limit']: @@ -112,11 +113,11 @@ return monitor_guid -def post_monitor_definition(api_key, monitor_name, monitor, monitor_status): +def post_monitor_definition(api_key, monitor_name, monitor, monitor_status, region=Endpoints.REGION_US): prep_monitor = monitortypes.prep_monitor_type(monitor['definition']) monitor_json_str = json.dumps(prep_monitor) logger.debug(monitor_json_str) -    response = requests.post(monitors_url, headers=setup_headers(api_key), data=monitor_json_str) +    response = requests.post(Endpoints.of(region).MONITORS_URL, headers=setup_headers(api_key), data=monitor_json_str) post_status = {monitorstatus.STATUS: response.status_code} logger.debug(response.headers) if response.status_code == 201: @@ -130,11 +131,11 @@ logger.info(monitor_name + " : " + str(post_status)) -def update(api_key, monitor_id, update_json, monitor_name): +def update(api_key, monitor_id, update_json, monitor_name, region=Endpoints.REGION_US): logger.info('Updating ' + monitor_name) update_payload = json.dumps(update_json) logger.info(update_payload) -    put_monitor_url = monitors_url + str(monitor_id) +    put_monitor_url = Endpoints.of(region).MONITORS_URL + str(monitor_id) result = {'entityUpdated': False} response = requests.patch(put_monitor_url, headers=setup_headers(api_key), data=update_payload) result['status'] = response.status_code @@ -147,3 +148,19 @@ else: result['updatedEntity'] = str(update_json) return result + + +def delete_monitor(monitor, target_acct, failure_status, success_status, tgt_api_key, region): +    logger.info(monitor) +    monitor_id = monitor['id'] +    monitor_name = monitor['name'] +    response = requests.delete(Endpoints.of(region).MONITORS_URL + monitor_id, +                               headers=setup_headers(tgt_api_key)) +    if response.status_code == 204: +        success_status[monitor_name] = {'status': response.status_code, 'responseText': 
response.text} + logger.info(target_acct + ":" + monitor_name + ":" + str(success_status[monitor_name])) + else: + failure_status[monitor_name] = {'status': response.status_code, 'responseText': response.text} + logger.info(target_acct + ":" + monitor_name + ":" + str(failure_status[monitor_name])) + # trying to stay within 3 requests per second + time.sleep(0.3) diff --git a/library/localstore.py b/library/localstore.py index 6070990..60cdc18 100644 --- a/library/localstore.py +++ b/library/localstore.py @@ -233,11 +233,13 @@ def save_alert_policies(account_id, alert_policies): alert_policies_dir = base_dir / account_id / ALERT_POLICIES_DIR save_json(alert_policies_dir, ALERT_POLICIES_FILE, alert_policies) + def save_alert_policy_entity_map(account_id, alert_policies_app_map): base_dir = Path("db") alert_policies_dir = base_dir / account_id / ALERT_POLICIES_DIR save_json(alert_policies_dir, ALERT_POLICY_ENTITY_MAP_FILE, alert_policies_app_map) + def save_alert_violations(account_id, alert_violations): base_dir = Path("db") alert_violations_dir = base_dir / account_id / ALERT_VIOLATIONS_DIR diff --git a/library/migrator/app_conditions.py b/library/migrator/app_conditions.py index 80b5582..b44672e 100644 --- a/library/migrator/app_conditions.py +++ b/library/migrator/app_conditions.py @@ -8,30 +8,32 @@ logger = logger.get_logger(os.path.basename(__file__)) -def migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_acct_id, tgt_api_key, tgt_policy, match_source_status): +def migrate(all_alert_status, policy_name, src_api_key, src_region, src_policy, + tgt_acct_id, tgt_api_key, tgt_region, tgt_policy, match_source_status): logger.info('loading source app conditions') - all_app_conditions = ac.get_app_conditions(src_api_key, src_policy['id'])[ac.CONDITIONS] + all_app_conditions = ac.get_app_conditions(src_api_key, src_policy['id'], src_region)[ac.CONDITIONS] logger.info("Found app alert conditions " + str(len(all_app_conditions))) tgt_app_conds = ac.app_conditions_by_name_entity(tgt_api_key, tgt_policy['id']) condition_num = 0 for app_condition in all_app_conditions: condition_num = condition_num + 1 entity_type = utils.get_entity_type(app_condition) - condition_row = create_condition_status_row(all_alert_status, app_condition, condition_num, entity_type, policy_name) + condition_row = create_condition_status_row(all_alert_status, app_condition, condition_num, + entity_type, policy_name) entity_ids = app_condition[ac.ENTITIES] tgt_entities = [] tgt_existing = [] for entity_id in entity_ids: - result = ec.get_entity(src_api_key, entity_type, entity_id) + result = ec.get_entity(src_api_key, entity_type, entity_id, src_region) if not result['entityFound']: status_src_not_found(all_alert_status, condition_row, entity_type, entity_id) continue src_entity = result['entity'] logger.info('source entity found ' + str(src_entity['id'])) if entity_type == ec.APM_KT: - result = ec.get_matching_kt(tgt_api_key,src_entity['name']) + result = ec.get_matching_kt(tgt_api_key, src_entity['name'], tgt_region) else: - result = ec.gql_get_matching_entity(tgt_api_key, entity_type, src_entity, tgt_acct_id) + result = ec.gql_get_matching_entity(tgt_api_key, entity_type, src_entity, tgt_acct_id, tgt_region) if not result['entityFound']: status_tgt_not_found(all_alert_status, condition_row, src_entity, app_condition) continue @@ -52,7 +54,7 @@ def migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_acct_id, update_condition_status(all_alert_status, condition_row, entity_ids, tgt_acct_id, 
tgt_entities) tgt_condition = create_tgt_app_condition(app_condition, tgt_entities, match_source_status) - result = ac.create_app_condition(tgt_api_key, tgt_policy, tgt_condition) + result = ac.create_app_condition(tgt_api_key, tgt_policy, tgt_condition, tgt_region) all_alert_status[condition_row][cs.STATUS] = result['status'] if cs.ERROR in result.keys(): all_alert_status[condition_row][cs.ERROR] = result['error'] diff --git a/library/migrator/extsvc_conditions.py b/library/migrator/extsvc_conditions.py index 1ca9961..b9aa6c9 100644 --- a/library/migrator/extsvc_conditions.py +++ b/library/migrator/extsvc_conditions.py @@ -8,9 +8,9 @@ log = m_logger.get_logger(os.path.basename(__file__)) -def extsvc_conditions_by_name_entity(api_key, policy_id): +def extsvc_conditions_by_name_entity(api_key, policy_id, region): conditions_by_name_entity = {} - extsvc_conditions = ac.get_extsvc_conditions(api_key, policy_id)[ac.EXTSVC_CONDITIONS] + extsvc_conditions = ac.get_extsvc_conditions(api_key, policy_id, region)[ac.EXTSVC_CONDITIONS] for extsvc_condition in extsvc_conditions: for entity_id in extsvc_condition['entities']: conditions_by_name_entity[extsvc_condition['name'] + str(entity_id)] = extsvc_condition @@ -24,14 +24,15 @@ def get_entity_type(extsvc_condition): return ec.MOBILE_APP -def migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_acct_id, tgt_api_key, tgt_policy, match_source_status): +def migrate(all_alert_status, policy_name, src_api_key, src_region, src_policy, + tgt_acct_id, tgt_api_key, tgt_region, tgt_policy, match_source_status): log.info('loading source ext svc conditions') - extsvc_conditions = ac.get_extsvc_conditions(src_api_key, src_policy['id'])[ac.EXTSVC_CONDITIONS] + extsvc_conditions = ac.get_extsvc_conditions(src_api_key, src_policy['id'], src_region)[ac.EXTSVC_CONDITIONS] if len(extsvc_conditions) <= 0: log.info("No external service conditions found.") return log.info("Found ext svc conditions " + str(len(extsvc_conditions))) - tgt_extsvc_conds = extsvc_conditions_by_name_entity(tgt_api_key, tgt_policy['id']) + tgt_extsvc_conds = extsvc_conditions_by_name_entity(tgt_api_key, tgt_policy['id'], tgt_region) cond_num = 0 for extsvc_condition in extsvc_conditions: cond_num = cond_num + 1 @@ -41,13 +42,13 @@ def migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_acct_id, tgt_entities = [] tgt_existing = [] for entity_id in entity_ids: - result = ec.get_entity(src_api_key, entity_type, entity_id) + result = ec.get_entity(src_api_key, entity_type, entity_id, src_region) if not result['entityFound']: status_src_not_found(all_alert_status, cond_row, entity_type, entity_id) continue src_entity = result['entity'] log.info('source entity found ' + str(src_entity['id'])) - result = ec.gql_get_matching_entity(tgt_api_key, entity_type, src_entity, tgt_acct_id) + result = ec.gql_get_matching_entity(tgt_api_key, entity_type, src_entity, tgt_acct_id, tgt_region) if not result['entityFound']: status_tgt_not_found(all_alert_status, cond_row, src_entity, extsvc_condition) continue @@ -64,7 +65,7 @@ def migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_acct_id, update_condition_status(all_alert_status, cond_row, entity_ids, tgt_acct_id, tgt_entities) tgt_condition = create_tgt_extsvc_condition(extsvc_condition, tgt_entities, match_source_status) - result = ac.create_extsvc_condition(tgt_api_key, tgt_policy, tgt_condition) + result = ac.create_extsvc_condition(tgt_api_key, tgt_policy, tgt_condition, tgt_region) 
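Each condition migrator's `migrate()` now takes the `src_region`/`tgt_region` pair and follows the same region-aware shape: fetch conditions from the source region, resolve each referenced entity in the source region, locate its counterpart in the target account and region, then create the condition against the target policy. A condensed sketch of that shared flow (hypothetical helper; status bookkeeping and entity-id mapping are omitted, so this is illustrative rather than a drop-in replacement for any migrator):

```python
import library.clients.alertsclient as ac
import library.clients.entityclient as ec


def migrate_conditions_sketch(src_api_key, src_region, src_policy,
                              tgt_api_key, tgt_region, tgt_acct_id, tgt_policy):
    # 1. Read conditions from the source account's region.
    conditions = ac.get_extsvc_conditions(src_api_key, src_policy['id'],
                                          src_region)[ac.EXTSVC_CONDITIONS]
    for condition in conditions:
        for entity_id in condition['entities']:
            entity_type = ec.APM_APP  # the real code derives this per condition
            # 2. Resolve the referenced entity in the source region...
            src = ec.get_entity(src_api_key, entity_type, entity_id, src_region)
            if not src['entityFound']:
                continue
            # 3. ...then find its counterpart in the target account/region.
            tgt = ec.gql_get_matching_entity(tgt_api_key, entity_type,
                                             src['entity'], tgt_acct_id, tgt_region)
            # (the migrators collect the matched target entity ids here)
        # 4. Create the rewritten condition in the target region.
        ac.create_extsvc_condition(tgt_api_key, tgt_policy, condition, tgt_region)
```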
all_alert_status[cond_row][cs.STATUS] = result['status'] if cs.ERROR in result.keys(): all_alert_status[cond_row][cs.ERROR] = result['error'] diff --git a/library/migrator/infra_conditions.py b/library/migrator/infra_conditions.py index 0ee0b61..24b25f0 100644 --- a/library/migrator/infra_conditions.py +++ b/library/migrator/infra_conditions.py @@ -5,12 +5,14 @@ logger = logger.get_logger(os.path.basename(__file__)) -def migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_acct_id, tgt_api_key, tgt_policy, match_source_status): + +def migrate(all_alert_status, policy_name, src_api_key, src_region, src_policy, + tgt_acct_id, tgt_api_key, tgt_region, tgt_policy, match_source_status): logger.info('Loading source infrastructure conditions ') - infra_conditions = ac.get_infra_conditions(src_api_key, src_policy['id'])[ac.INFRA_CONDITIONS] + infra_conditions = ac.get_infra_conditions(src_api_key, src_policy['id'], src_region)[ac.INFRA_CONDITIONS] logger.info('Found infrastructure conditions ' + str(len(infra_conditions))) logger.info('Loading target infrastructure conditions ' + policy_name) - tgt_infra_conds = ac.infra_conditions_by_name(tgt_api_key, tgt_policy['id']) + tgt_infra_conds = ac.infra_conditions_by_name(tgt_api_key, tgt_policy['id'], tgt_region) condition_num = 0 for infra_condition in infra_conditions: condition_num = condition_num + 1 @@ -19,11 +21,12 @@ def migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_acct_id, if infra_condition['name'] not in tgt_infra_conds: logger.info('Creating target infrastructure condition ' + infra_condition['name']) tgt_condition = create_tgt_infra_condition(infra_condition, tgt_policy['id'], match_source_status) - result = ac.create_infra_condition(tgt_api_key, tgt_policy, tgt_condition) + result = ac.create_infra_condition(tgt_api_key, tgt_policy, tgt_condition, tgt_region) all_alert_status[condition_row][cs.STATUS] = result['status'] if 'error' in result.keys(): all_alert_status[condition_row][cs.ERROR] = result['error'] + def create_tgt_infra_condition(infra_condition, tgt_pol_id, match_source_status): tgt_condition = infra_condition.copy() tgt_condition.pop('id') @@ -32,4 +35,4 @@ def create_tgt_infra_condition(infra_condition, tgt_pol_id, match_source_status) tgt_condition['policy_id'] = tgt_pol_id if match_source_status == False: tgt_condition['enabled'] = False - return tgt_condition \ No newline at end of file + return tgt_condition diff --git a/library/migrator/loc_failure_conditions.py b/library/migrator/loc_failure_conditions.py index b196243..475dbe2 100644 --- a/library/migrator/loc_failure_conditions.py +++ b/library/migrator/loc_failure_conditions.py @@ -8,26 +8,28 @@ logger = logger.get_logger(os.path.basename(__file__)) -def migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_acct_id, tgt_api_key, tgt_policy, match_source_status): +def migrate(all_alert_status, policy_name, src_api_key, src_region, src_policy, + tgt_acct_id, tgt_api_key, tgt_region, tgt_policy, match_source_status): logger.info('Loading source location failure conditions ') - result = ac.get_location_failure_conditions(src_api_key, src_policy['id']) + result = ac.get_location_failure_conditions(src_api_key, src_policy['id'], src_region) loc_conds = [] if result['response_count'] > 0: logger.info('location failure response count ' + str(result['response_count'])) loc_conds = result[ac.LOCATION_FAILURE_CONDITIONS] logger.info('Fetched conditions ' + str(len(loc_conds))) logger.info('Loading target loc failure 
conditions') - tgt_loc_conds = ac.loc_conditions_by_name_monitor(tgt_api_key, tgt_policy['id']) + tgt_loc_conds = ac.loc_conditions_by_name_monitor(tgt_api_key, tgt_policy['id'], tgt_region) condition_num = 0 for loc_condition in loc_conds: condition_num = condition_num + 1 condition_row = policy_name + '-sloccon' + str(condition_num) tgt_entities = [] for entity_id in loc_condition['entities']: - src_monitor_name = mc.get_monitor(src_api_key, entity_id)['monitor']['name'] + src_monitor_name = mc.get_monitor(src_api_key, entity_id, src_region)['monitor']['name'] all_alert_status[condition_row] = {cs.COND_NAME: loc_condition['name']} all_alert_status[condition_row][cs.SRC_MONITOR] = src_monitor_name - result = ec.gql_get_matching_entity_by_name(tgt_api_key, ec.SYNTH_MONITOR, src_monitor_name, tgt_acct_id) + result = ec.gql_get_matching_entity_by_name(tgt_api_key, ec.SYNTH_MONITOR, src_monitor_name, + tgt_acct_id, tgt_region) if not result['entityFound']: all_alert_status[condition_row][cs.TGT_MONITOR] = 'NOT_FOUND' logger.warn('No matching entity found in target account ' + src_monitor_name) @@ -45,7 +47,7 @@ def migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_acct_id, if len(tgt_entities) > 0: logger.info('Creating target synthetic condition ' + loc_condition['name']) tgt_condition = create_tgt_loc_condition(loc_condition, tgt_entities, match_source_status) - result = ac.create_loc_failure_condition(tgt_api_key, tgt_policy, tgt_condition) + result = ac.create_loc_failure_condition(tgt_api_key, tgt_policy, tgt_condition, tgt_region) all_alert_status[condition_row][cs.STATUS] = result['status'] if 'error' in result.keys(): all_alert_status[condition_row][cs.ERROR] = result['error'] diff --git a/library/migrator/nrql_conditions.py b/library/migrator/nrql_conditions.py index f5e02ce..61e18df 100644 --- a/library/migrator/nrql_conditions.py +++ b/library/migrator/nrql_conditions.py @@ -6,9 +6,10 @@ logger = logger.get_logger(os.path.basename(__file__)) -def migrate(all_alert_status, policy_name, src_acct_id, src_api_key, src_policy, tgt_acct_id, tgt_api_key, tgt_policy, match_source_status): +def migrate(all_alert_status, policy_name, src_acct_id, src_api_key, src_region, src_policy, + tgt_acct_id, tgt_api_key, tgt_region, tgt_policy, match_source_status): logger.info('Loading source NRQL conditions ') - result = ac.get_nrql_conditions(src_api_key, src_acct_id, src_policy['id']) + result = ac.get_nrql_conditions(src_api_key, src_acct_id, src_policy['id'], src_region) if result['error']: all_alert_status[policy_name][cs.ERROR] = result['error'] return @@ -17,7 +18,7 @@ def migrate(all_alert_status, policy_name, src_acct_id, src_api_key, src_policy, logger.info('Fetched %d source conditions' % len(nrql_conds)) logger.info('Loading target NRQL conditions') - result = ac.nrql_conditions_by_name(tgt_api_key, tgt_acct_id, tgt_policy['id']) + result = ac.nrql_conditions_by_name(tgt_api_key, tgt_acct_id, tgt_policy['id'], tgt_region) if result['error']: all_alert_status[policy_name][cs.ERROR] = result['error'] return @@ -36,7 +37,8 @@ def migrate(all_alert_status, policy_name, src_acct_id, src_api_key, src_policy, all_alert_status[condition_row][cs.COND_EXISTED_TARGET] = 'N' logger.info('Creating target NRQL condition %s' % nrql_condition['name']) tgt_condition = create_tgt_nrql_condition(nrql_condition, match_source_status) - result = ac.create_nrql_condition(tgt_api_key, tgt_acct_id, tgt_policy['id'], tgt_condition, nrql_condition['type']) + result = 
ac.create_nrql_condition(tgt_api_key, tgt_region, tgt_acct_id, tgt_policy['id'], + tgt_condition, nrql_condition['type']) all_alert_status[condition_row][cs.STATUS] = result['status'] if 'error' in result.keys(): all_alert_status[condition_row][cs.ERROR] = result['error'] diff --git a/library/migrator/synth_conditions.py b/library/migrator/synth_conditions.py index f0f5d10..811c68a 100644 --- a/library/migrator/synth_conditions.py +++ b/library/migrator/synth_conditions.py @@ -8,21 +8,23 @@ logger = logger.get_logger(os.path.basename(__file__)) -def migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_acct_id, tgt_api_key, tgt_policy, match_source_status): +def migrate(all_alert_status, policy_name, src_api_key, src_region, src_policy, + tgt_acct_id, tgt_api_key, tgt_region, tgt_policy, match_source_status): logger.info('Loading source synthetic conditions ') - synth_conditions = ac.get_synthetic_conditions(src_api_key, src_policy['id'])[ac.SYNTH_CONDITIONS] + synth_conditions = ac.get_synthetic_conditions(src_api_key, src_policy['id'], src_region)[ac.SYNTH_CONDITIONS] logger.info('Found synthetic conditions ' + str(len(synth_conditions))) logger.info('Loading target synthetic conditions ' + policy_name) - tgt_synth_conds = ac.synth_conditions_by_name_monitor(tgt_api_key, tgt_policy['id']) + tgt_synth_conds = ac.synth_conditions_by_name_monitor(tgt_api_key, tgt_policy['id'], tgt_region) condition_num = 0 for synth_condition in synth_conditions: condition_num = condition_num + 1 condition_row = policy_name + '-scon' + str(condition_num) src_monitor_id = synth_condition[ac.MONITOR_ID] - src_monitor_name = mc.get_monitor(src_api_key, src_monitor_id)['monitor']['name'] + src_monitor_name = mc.get_monitor(src_api_key, src_monitor_id, src_region)['monitor']['name'] all_alert_status[condition_row] = {cs.COND_NAME: synth_condition['name']} all_alert_status[condition_row][cs.SRC_MONITOR] = src_monitor_name - result = ec.gql_get_matching_entity_by_name(tgt_api_key, ec.SYNTH_MONITOR, src_monitor_name, tgt_acct_id) + result = ec.gql_get_matching_entity_by_name(tgt_api_key, ec.SYNTH_MONITOR, src_monitor_name, + tgt_acct_id, tgt_region) if result['entityFound']: tgt_monitor = result['entity'] all_alert_status[condition_row][cs.TGT_ACCOUNT] = tgt_monitor['accountId'] @@ -31,8 +33,10 @@ def migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_acct_id, tgt_key = synth_condition['name'] + tgt_monitor['monitorId'] if tgt_key not in tgt_synth_conds: logger.info('Creating target synthetic condition ' + synth_condition['name']) - tgt_condition = create_tgt_synth_condition(synth_condition, tgt_monitor['monitorId'], match_source_status) - result = ac.create_synthetic_condition(tgt_api_key, tgt_policy, tgt_condition, tgt_monitor['name']) + tgt_condition = create_tgt_synth_condition(synth_condition, tgt_monitor['monitorId'], + match_source_status) + result = ac.create_synthetic_condition(tgt_api_key, tgt_policy, tgt_condition, + tgt_monitor['name'], tgt_region) all_alert_status[condition_row][cs.STATUS] = result['status'] if 'error' in result.keys(): all_alert_status[condition_row][cs.ERROR] = result['error'] diff --git a/library/securecredentials.py b/library/securecredentials.py index 5152b4d..00a91eb 100644 --- a/library/securecredentials.py +++ b/library/securecredentials.py @@ -6,9 +6,9 @@ from library.status.monitorstatus import SEC_CREDENTIALS from library.status.monitorstatus import CHECK_COUNT import library.migrationlogger as migrationlogger +from library.clients.endpoints 
import Endpoints logger = migrationlogger.get_logger(os.path.basename(__file__)) -SEC_CREDENTIALS_URL = 'https://synthetics.newrelic.com/synthetics/api/v1/secure-credentials' query_secure_credentials_for = "FROM SyntheticCheck SELECT uniques(secureCredentials), count(monitorName) " \ "SINCE 7 days ago WHERE monitorName = " @@ -30,13 +30,13 @@ def from_script(script): # returns set of secureCredentials and number of checks run , # checkCount of 0 indicates monitor hasn't been run in the past week -def from_insights(insights_query_key, account_id, monitor_name): +def from_insights(insights_query_key, account_id, monitor_name, region=Endpoints.REGION_US): logger.info("Fetching secure credentials for " + monitor_name) escaped_monitor_name = escape(monitor_name) query = query_secure_credentials_for + "'" + escaped_monitor_name + "'" secure_credentials = [] credentials_and_checks = {SEC_CREDENTIALS: secure_credentials, CHECK_COUNT: 0} - result = insightsclient.execute(insights_query_key, account_id, query) + result = insightsclient.execute(insights_query_key, account_id, query, region) if result['status'] == 200: results_json = result['json'] secure_credentials = results_json['results'][0]['members'] @@ -57,14 +57,14 @@ def escape(monitor_name): return monitor_name -def create(api_key, scripted_monitors): +def create(api_key, scripted_monitors, region=Endpoints.REGION_US): sec_creds_set = get_unique_credentials(scripted_monitors) secure_credential_status = {} for secure_cred in sec_creds_set: sec_cred_data = {'key': secure_cred, 'value': 'dummy', 'description': 'PLEASE UPDATE. Created by migration script.'} sec_cred_json_str = json.dumps(sec_cred_data) - response = requests.post(SEC_CREDENTIALS_URL, headers=setup_headers(api_key), data=sec_cred_json_str) + response = requests.post(Endpoints.of(region).SEC_CREDENTIALS_URL, headers=setup_headers(api_key), data=sec_cred_json_str) status = {'sec_cred_status': response.status_code} if response.text: status['body'] = response.text @@ -83,13 +83,13 @@ def get_unique_credentials(scripted_monitors): return secure_credentials_set -def delete_all(api_key, account_id): +def delete_all(api_key, account_id, region): logger.warn('Deleting all secure credentials for ' + account_id) - result = requests.get(SEC_CREDENTIALS_URL, headers=setup_headers(api_key)) + result = requests.get(Endpoints.of(region).SEC_CREDENTIALS_URL, headers=setup_headers(api_key)) if result.status_code == 200: response_json = result.json() sec_creds = response_json['secureCredentials'] for sec_cred in sec_creds: logger.info('Deleting ' + sec_cred['key']) - result = requests.delete(SEC_CREDENTIALS_URL + '/' + sec_cred['key'], headers=setup_headers(api_key)) - logger.info('Delete status ' + str(result.status_code)) \ No newline at end of file + result = requests.delete(Endpoints.of(region).SEC_CREDENTIALS_URL + '/' + sec_cred['key'], headers=setup_headers(api_key)) + logger.info('Delete status ' + str(result.status_code)) diff --git a/library/utils.py b/library/utils.py index a323fde..212e060 100644 --- a/library/utils.py +++ b/library/utils.py @@ -16,6 +16,7 @@ INFRA_PAGINATION = 'infra' logger = m_logger.get_logger(os.path.basename(__file__)) + def configure_loglevel(args): log_level = logging.INFO if args.debug: @@ -23,6 +24,7 @@ def configure_loglevel(args): m_logger.set_log_level(log_level) + def setup_headers(api_key): return {'Api-Key': api_key, 'Content-Type': 'application/json'} @@ -112,6 +114,28 @@ def ensure_source_api_key(args): api_key = os.environ.get('ENV_SOURCE_API_KEY') 
return api_key + +def ensure_region(args): + region = 'us' + if args.region and len(args.region) > 0: + region = args.region[0] + return region + + +def ensure_source_region(args): + sourceRegion = 'us' + if args.sourceRegion and len(args.sourceRegion) > 0: + sourceRegion = args.sourceRegion[0] + return sourceRegion + + +def ensure_target_region(args): + targetRegion = 'us' + if args.targetRegion and len(args.targetRegion) > 0: + targetRegion = args.targetRegion[0] + return targetRegion + + def error_and_exit(param_name, env_name): error_message_and_exit('Error: Missing param ' + param_name + ' or env variable ' + env_name) @@ -143,21 +167,20 @@ def get_condition_prefix(entity_type): return '-ktcon' -def load_alert_policy_names(policyNameFile, entityNameFile, account_id, api_key, use_local): +def load_alert_policy_names(policyNameFile, entityNameFile, account_id, region, api_key, use_local): names = set() if policyNameFile: policy_names = store.load_names(policyNameFile) names.update(set(policy_names)) - if entityNameFile: entity_names = store.load_names(entityNameFile) if entity_names: - policy_names = ac.get_policy_names_by_entities(entity_names, account_id, api_key, use_local) + policy_names = ac.get_policy_names_by_entities(entity_names, account_id, api_key, use_local, region) if policy_names: names.update(set(policy_names)) - return list(names) + def config_get( config: configparser.ConfigParser, section_name: str, @@ -169,6 +192,7 @@ def config_get( return os.environ.get('ENV_%s' % key.upper()) + def process_base_config( config: configparser.ConfigParser, section_name: str, @@ -184,6 +208,18 @@ def process_base_config( error_message_and_exit('A source account ID is required') source_account_id = args.source_account_id[0] + source_region = config_get( + config, + section_name, + 'source_region' + ) + if not source_region: + if not args.source_region: + logger.info('source_region not specified defaulting to us') + source_region = 'us' + else: + source_region = args.source_region[0] + target_account_id = config_get( config, section_name, @@ -194,6 +230,18 @@ def process_base_config( error_message_and_exit('A target account ID is required') target_account_id = args.target_account_id[0] + target_region = config_get( + config, + section_name, + 'target_region' + ) + if not target_region: + if not args.target_region: + logger.info('target_region not specified defaulting to us') + target_region = 'us' + else: + target_region = args.target_region[0] + source_api_key = config_get(config, section_name, 'source_api_key') if not source_api_key: if not args.source_api_key: @@ -208,7 +256,9 @@ def process_base_config( return { 'source_account_id': source_account_id, + 'source_region': source_region, 'target_account_id': target_account_id, + 'target_region': target_region, 'source_api_key': source_api_key, 'target_api_key': target_api_key, - } \ No newline at end of file + } diff --git a/migrate_apm.py b/migrate_apm.py index 6eb4876..a5e0eb6 100644 --- a/migrate_apm.py +++ b/migrate_apm.py @@ -8,7 +8,7 @@ logger = m_logger.get_logger(os.path.basename(__file__)) MIGRATE_SETTINGS = 'settings' -DEFAULT_MIGRATE_LIST = (MIGRATE_SETTINGS) +DEFAULT_MIGRATE_LIST = [MIGRATE_SETTINGS] # api_key: { appName: srcEntity } app_src_entities = {} app_names = [] @@ -19,36 +19,43 @@ def setup_params(parser): parser.add_argument('--sourceAccount', nargs=1, type=str, required=True, help='Source accountId local Store \ like db//monitors .') parser.add_argument('--sourceApiKey', nargs=1, type=str, required=True, 
help='Source account API Key') +    parser.add_argument('--sourceRegion', type=str, nargs=1, required=False, help='sourceRegion us(default) or eu') parser.add_argument('--targetAccount', nargs=1, type=str, required=True, help='Target accountId or \ set environment variable ENV_SOURCE_API_KEY') parser.add_argument('--targetApiKey', nargs=1, type=str, required=True, help='Target API Key, \ or set environment variable ENV_TARGET_API_KEY') -    parser.add_argument('--settings', dest='settings', required=False, action='store_true', -                        help='Pass --settings to migrate settings for apdex thresholds and real end user monitoring') + parser.add_argument('--targetRegion', type=str, nargs=1, required=False, help='targetRegion us(default) or eu') -def print_args(src_api_key, tgt_api_key): + +def print_args(src_api_key, sourceRegion, tgt_api_key, targetRegion, args): logger.info("Using fromFile : " + args.fromFile[0]) logger.info("Using sourceAccount : " + str(args.sourceAccount[0])) logger.info("Using sourceApiKey : " + len(src_api_key[:-4]) * "*" + src_api_key[-4:]) +    if args.sourceRegion and len(args.sourceRegion) > 0: +        logger.info("sourceRegion : " + args.sourceRegion[0]) +    else: +        logger.info("sourceRegion not passed : Defaulting to " + sourceRegion) logger.info("Using targetAccount : " + str(args.targetAccount[0])) logger.info("Using targetApiKey : " + len(tgt_api_key[:-4]) * "*" + tgt_api_key[-4:]) -    if args.settings: -        logger.info("Migrating APM Settings") +    if args.targetRegion and len(args.targetRegion) > 0: +        logger.info("targetRegion : " + args.targetRegion[0]) +    else: +        logger.info("targetRegion not passed : Defaulting to " + targetRegion) -def get_entity_by_name(api_key, app_name): -    result = ec.get_apm_entity_by_name(api_key, app_name) +def get_entity_by_name(api_key, app_name, region): +    result = ec.get_apm_entity_by_name(api_key, app_name, region) if not result['entityFound']: logger.warning('Could not locate source application ' + app_name) return None return result['entity'] -def get_src_entity(api_key, app_name): +def get_src_entity(api_key, app_name, src_region): global app_src_entities if api_key in app_src_entities and app_name in app_src_entities[api_key]: return app_src_entities[api_key][app_name] -    src_entity = get_entity_by_name(api_key, app_name) +    src_entity = get_entity_by_name(api_key, app_name, src_region) if api_key in app_src_entities: app_src_entities[api_key][app_name] = src_entity else: @@ -57,22 +64,22 @@
str(tgt_entity['id']), {'application': src_settings}, tgt_region) logger.info('Updated settings results ' + app_name + str(result)) update_settings_status(all_apps_status, app_name, result) @@ -87,17 +94,15 @@ def update_settings_status(all_apps_status, app_name, result): all_apps_status[app_name][appkeys.ENABLE_RUM] = result['application']['settings']['enable_real_user_monitoring'] -def migrate_apps(from_file, src_acct, src_api_key, - tgt_acct, tgt_api_key, migrate_list=DEFAULT_MIGRATE_LIST): +def migrate_apps(from_file, src_acct, src_api_key, src_region, tgt_acct, tgt_api_key, tgt_region): all_apps_status = {} - if MIGRATE_SETTINGS in migrate_list: - migrate_settings(from_file, src_api_key, tgt_api_key, all_apps_status) + migrate_settings(from_file, src_api_key, src_region, tgt_api_key, tgt_region, all_apps_status) file_name = utils.file_name_from(from_file) status_csv = src_acct + "_" + file_name + "_migrate_apm_" + tgt_acct + ".csv" store.save_status_csv(status_csv, all_apps_status, appkeys) -if __name__ == '__main__': +def main(): parser = argparse.ArgumentParser(description='Migrate APM settings for list of apps from one account to another') setup_params(parser) args = parser.parse_args() @@ -107,11 +112,12 @@ def migrate_apps(from_file, src_acct, src_api_key, target_api_key = utils.ensure_target_api_key(args) if not target_api_key: utils.error_and_exit('targetApiKey', 'ENV_TARGET_API_KEY') - if not args.settings and not args.labels: - logger.error("One or both of --labels or --settings must be passed") - print_args(source_api_key, target_api_key) - mig_list = [] - if args.settings: - mig_list.append(MIGRATE_SETTINGS) - migrate_apps(args.fromFile[0], args.sourceAccount[0], source_api_key, - args.targetAccount[0], target_api_key, mig_list) + src_region = utils.ensure_source_region(args) + tgt_region = utils.ensure_target_region(args) + print_args(source_api_key, src_region, target_api_key, tgt_region, args) + migrate_apps(args.fromFile[0], args.sourceAccount[0], source_api_key, src_region, + args.targetAccount[0], target_api_key, tgt_region) + + +if __name__ == '__main__': + main() diff --git a/migrate_dashboards.py b/migrate_dashboards.py index d7db8fe..ed1583c 100644 --- a/migrate_dashboards.py +++ b/migrate_dashboards.py @@ -10,45 +10,53 @@ log = m_logger.get_logger(os.path.basename(__file__)) -def print_args(src_api_key, tgt_api_key): +def print_args(args, src_api_key, sourceRegion, tgt_api_key, targetRegion): log.info("Using fromFile : " + args.fromFile[0]) log.info("Using sourceAccount : " + str(args.sourceAccount[0])) log.info("Using sourceApiKey : " + len(src_api_key[:-4])*"*"+src_api_key[-4:]) + if args.sourceRegion and len(args.sourceRegion) > 0: + log.info("sourceRegion : " + args.sourceRegion[0]) + else: + log.info("sourceRegion not passed : Defaulting to " + sourceRegion) log.info("Using targetAccount : " + str(args.targetAccount[0])) log.info("Using targetApiKey : " + len(tgt_api_key[:-4]) * "*" + tgt_api_key[-4:]) + if args.targetRegion and len(args.targetRegion) > 0: + log.info("targetRegion : " + args.targetRegion[0]) + else: + log.info("targetRegion not passed : Defaulting to " + targetRegion) -def setup_params(): +def configure_parser(): + parser = argparse.ArgumentParser(description='Migrate Dashboards') parser.add_argument('--fromFile', nargs=1, type=str, required=True, help='Path to file with dashboard names(newline separated)') parser.add_argument('--sourceAccount', nargs=1, type=int, required=True, help='Source accountId') parser.add_argument('--sourceApiKey', 
nargs=1, type=str, required=True, help='Source account API Key or \
                        set environment variable ENV_SOURCE_API_KEY')
+    parser.add_argument('--sourceRegion', type=str, nargs=1, required=False, help='sourceRegion us(default) or eu')
     parser.add_argument('--targetAccount', nargs=1, type=int, required=True, help='Target accountId')
     parser.add_argument('--targetApiKey', nargs=1, type=str, required=True, help='Target API Key, \
                        or set environment variable ENV_TARGET_API_KEY')
+    parser.add_argument('--targetRegion', type=str, nargs=1, required=False, help='targetRegion us(default) or eu')
+    return parser
 
 
-def get_dashboard(per_api_key, name, all_db_status, acct_id, get_widgets = False):
-    result = ec.get_dashboard_definition(per_api_key, name, acct_id)
+def get_dashboard(per_api_key, name, all_db_status, acct_id, *, get_widgets=False, region='us'):
+    result = ec.get_dashboard_definition(per_api_key, name, acct_id, region)
     if not result:
         all_db_status[name][ds.DASHBOARD_FOUND] = False
         return None
-
     all_db_status[name][ds.DASHBOARD_FOUND] = True
     if not get_widgets:
         return result
-
-    widgets_result = ec.get_dashboard_widgets(per_api_key, result['guid'])
+    widgets_result = ec.get_dashboard_widgets(per_api_key, result['guid'], region)
     if 'error' in widgets_result:
         all_db_status[name][ds.ERROR] = result['error']
         log.error('Error fetching dashboard widgets' + name + ' ' + result['error'])
         return None
-
     if not widgets_result['entityFound']:
         all_db_status[name][ds.WIDGETS_FOUND] = False
         return None
-
     all_db_status[name][ds.WIDGETS_FOUND] = True
     return widgets_result['entity']
 
@@ -56,42 +64,40 @@ def get_dashboard(per_api_key, name, all_db_status, acct_id, get_widgets = False
 def update_nrql_account_ids(src_acct_id, tgt_acct_id, entity):
     if not 'pages' in entity:
         return
-
     for page in entity['pages']:
         if not 'widgets' in page:
             continue
-
         for widget in page['widgets']:
             if not 'rawConfiguration' in widget:
                 continue
-
             if not 'nrqlQueries' in widget['rawConfiguration']:
                 continue
-
             for query in widget['rawConfiguration']['nrqlQueries']:
                 if 'accountId' in query and query['accountId'] == src_acct_id:
                     query['accountId'] = tgt_acct_id
 
-def migrate_dashboards(from_file, src_acct, src_api_key, tgt_acct, tgt_api_key):
+
+def migrate_dashboards(from_file, src_acct, src_api_key, src_region, tgt_acct, tgt_api_key, tgt_region):
     log.info('Dashboard migration started.')
     db_names = store.load_names(from_file)
     all_db_status = {}
     for db_name in db_names:
         all_db_status[db_name] = {}
-        tgt_dashboard = get_dashboard(tgt_api_key, db_name, all_db_status, tgt_acct)
+        tgt_dashboard = get_dashboard(tgt_api_key, db_name, all_db_status, tgt_acct,
+                                      get_widgets=False, region=tgt_region)
         if tgt_dashboard is not None:
             log.warning('Dashboard already exists in target skipping : ' + db_name)
             all_db_status[db_name][ds.TARGET_EXISTED] = True
             continue
         all_db_status[db_name][ds.TARGET_EXISTED] = False
-        src_dashboard = get_dashboard(src_api_key, db_name, all_db_status, src_acct, True)
+        src_dashboard = get_dashboard(src_api_key, db_name, all_db_status, src_acct, get_widgets=True, region=src_region)
         if src_dashboard is None:
             continue
         log.info('Found source dashboard ' + db_name)
         tgt_dashboard = src_dashboard
         del tgt_dashboard['guid']
         update_nrql_account_ids(src_acct, tgt_acct, tgt_dashboard)
-        result = ec.post_dashboard(tgt_api_key, tgt_dashboard, tgt_acct)
+        result = ec.post_dashboard(tgt_api_key, tgt_dashboard, tgt_acct, tgt_region)
         all_db_status[db_name][ds.STATUS] = result['status']
         if result['entityCreated']:
             log.info('Created target dashboard ' + db_name)
@@ -102,15 +108,21 @@ def migrate_dashboards(from_file, src_acct, src_api_key, tgt_acct, tgt_api_key):
     log.info('Dashboard migration complete.')
 
 
-if __name__ == '__main__':
-    parser = argparse.ArgumentParser(description='Migrate Dashboards')
-    setup_params()
+def main():
+    parser = configure_parser()
     args = parser.parse_args()
-    source_api_key = utils.ensure_source_api_key(args)
-    if not source_api_key:
+    src_api_key = utils.ensure_source_api_key(args)
+    if not src_api_key:
         utils.error_and_exit('source_api_key', 'ENV_SOURCE_API_KEY')
-    target_api_key = utils.ensure_target_api_key(args)
-    if not target_api_key:
+    tgt_api_key = utils.ensure_target_api_key(args)
+    if not tgt_api_key:
         utils.error_and_exit('target_api_key', 'ENV_TARGET_API_KEY')
-    print_args(source_api_key, target_api_key)
-    migrate_dashboards(args.fromFile[0], args.sourceAccount[0], source_api_key, args.targetAccount[0], target_api_key)
\ No newline at end of file
+    src_region = utils.ensure_source_region(args)
+    tgt_region = utils.ensure_target_region(args)
+    print_args(args, src_api_key, src_region, tgt_api_key, tgt_region)
+    migrate_dashboards(args.fromFile[0], args.sourceAccount[0], src_api_key, src_region, args.targetAccount[0],
+                       tgt_api_key, tgt_region)
+
+
+if __name__ == '__main__':
+    main()
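
The region flags threaded through migrate_dashboards.py above ultimately resolve to a per-region API host. A minimal sketch of the lookup this patch centralizes in the new library/clients/endpoints.py; the constant names and exact URLs below are illustrative assumptions, not that module's actual API:

```
# Illustrative only: names and URLs are assumptions, not the real
# library/clients/endpoints.py API added by this patch.
GRAPHQL_URLS = {
    'us': 'https://api.newrelic.com/graphql',     # assumed US NerdGraph host
    'eu': 'https://api.eu.newrelic.com/graphql',  # assumed EU NerdGraph host
}


def graphql_url(region='us'):
    # Fall back to the US endpoint for any unrecognized region value.
    return GRAPHQL_URLS.get(region, GRAPHQL_URLS['us'])
```
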
to " + src_region) logger.info("Using sourceApiKey : " + len(src_api_key[:-4])*"*"+src_api_key[-4:]) logger.info("Using targetAccount : " + args.target_account_id[0]) + if args.targetRegion and len(args.targetRegion) > 0: + logger.info("targetRegion : " + args.targetRegion[0]) + else: + logger.info("targetRegion not passed : Defaulting to " + tgt_region) logger.info("Using targetApiKey : " + len(tgt_api_key[:-4]) * "*" + tgt_api_key[-4:]) if args.match_source_state: logger.info("Matching condition enable/disable state in target account instead of disabling all new conditions") @@ -173,42 +203,46 @@ def print_args(src_api_key, tgt_api_key): if args.use_local: logger.info("Using local copy of alert policies and policy entity map") -def migrate_conditions(policy_names, src_account_id, src_api_key, tgt_account_id, tgt_api_key, cond_types, match_source_status): + +def migrate_conditions(policy_names, src_account_id, src_region, src_api_key, + tgt_account_id, tgt_region, tgt_api_key, cond_types, + match_source_status): all_alert_status = {} for policy_name in policy_names: logger.info('Migrating conditions for policy ' + policy_name) all_alert_status[policy_name] = {} - src_result = ac.get_policy(src_api_key, policy_name) + src_result = ac.get_policy(src_api_key, policy_name, src_region) if not src_result['policyFound']: logger.error("Skipping as policy not found in source account " + policy_name) all_alert_status[policy_name][cs.ERROR] = 'Policy not found in source account' continue src_policy = src_result['policy'] - tgt_result = ac.get_policy(tgt_api_key, policy_name) + tgt_result = ac.get_policy(tgt_api_key, policy_name, tgt_region) if not tgt_result['policyFound']: logger.error("Skipping as policy not found in target account " + policy_name) all_alert_status[policy_name][cs.ERROR] = 'Policy not found in target account' continue tgt_policy = tgt_result['policy'] if SYNTHETICS in cond_types: - sc_migrator.migrate(all_alert_status, policy_name, src_api_key, src_policy, - tgt_account_id, tgt_api_key, tgt_policy, match_source_status) - lfc_migrator.migrate(all_alert_status, policy_name, src_api_key, src_policy, - tgt_account_id, tgt_api_key, tgt_policy, match_source_status) + sc_migrator.migrate(all_alert_status, policy_name, src_api_key, src_region, src_policy, + tgt_account_id, tgt_api_key, tgt_region, tgt_policy, match_source_status) + lfc_migrator.migrate(all_alert_status, policy_name, src_api_key, src_region, src_policy, + tgt_account_id, tgt_api_key, tgt_region, tgt_policy, match_source_status) if APP_CONDITIONS in cond_types: - ac_migrator.migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_account_id, - tgt_api_key, tgt_policy, match_source_status) + ac_migrator.migrate(all_alert_status, policy_name, src_api_key, src_region, src_policy, + tgt_account_id, tgt_api_key, tgt_region, tgt_policy, match_source_status) if NRQL_CONDITIONS in cond_types: - nrql_migrator.migrate(all_alert_status, policy_name, src_account_id, src_api_key, src_policy, tgt_account_id, - tgt_api_key, tgt_policy, match_source_status) + nrql_migrator.migrate(all_alert_status, policy_name, src_account_id, src_api_key, src_region, src_policy, + tgt_account_id, tgt_api_key, tgt_region, tgt_policy, match_source_status) if EXT_SVC_CONDITIONS in cond_types: - extsvc_migrator.migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_account_id, - tgt_api_key, tgt_policy, match_source_status) + extsvc_migrator.migrate(all_alert_status, policy_name, src_api_key, src_region, src_policy, + tgt_account_id, 
tgt_api_key, tgt_region, tgt_policy, match_source_status) if INFRA_CONDITIONS in cond_types: - infra_migrator.migrate(all_alert_status, policy_name, src_api_key, src_policy, tgt_account_id, - tgt_api_key, tgt_policy, match_source_status) + infra_migrator.migrate(all_alert_status, policy_name, src_api_key, src_region, src_policy, + tgt_account_id, tgt_api_key, tgt_region, tgt_policy, match_source_status) return all_alert_status + def parse_condition_types(args): if args.all: return ALL_CONDITIONS @@ -226,6 +260,7 @@ def parse_condition_types(args): condition_types.append(INFRA_CONDITIONS) return condition_types + def parse_condition_types_with_config( config: configparser.ConfigParser, args: argparse.Namespace @@ -233,7 +268,7 @@ def parse_condition_types_with_config( if config.getboolean( 'migrate.conditions', 'all', - fallback = args.all + fallback=args.all ): return ALL_CONDITIONS @@ -241,25 +276,25 @@ def parse_condition_types_with_config( if config.getboolean( 'migrate.conditions', 'synthetics', - fallback = args.synthetics + fallback=args.synthetics ): condition_types.append(SYNTHETICS) if config.getboolean( 'migrate.conditions', 'app_conditions', - fallback = args.app_conditions + fallback=args.app_conditions ): condition_types.append(APP_CONDITIONS) if config.getboolean( 'migrate.conditions', 'nrql_conditions', - fallback = args.nrql_conditions + fallback=args.nrql_conditions ): condition_types.append(NRQL_CONDITIONS) if config.getboolean( 'migrate.conditions', 'ext_svc_conditions', - fallback = args.ext_svc_conditions + fallback=args.ext_svc_conditions ): condition_types.append(EXT_SVC_CONDITIONS) if config.getboolean( @@ -276,7 +311,9 @@ def migrate( policy_file_path: str, entity_file_path: str, source_acct_id: int, + source_region: str, target_acct_id: int, + target_region: str, source_api_key: str, target_api_key: str, cond_types: List[str], @@ -287,6 +324,7 @@ def migrate( policy_file_path, entity_file_path, source_acct_id, + source_region, source_api_key, use_local ) @@ -294,8 +332,10 @@ def migrate( status = migrate_conditions( policy_names, source_acct_id, + source_region, source_api_key, target_acct_id, + target_region, target_api_key, cond_types, match_source_state @@ -311,6 +351,7 @@ def migrate( return status_file + class MigrateConditionsCommand: def configure_parser(self, migrate_subparsers, global_options_parser): # Create the parser for the "conditions" command @@ -335,7 +376,7 @@ def run(self, config: configparser.ConfigParser, args: argparse.Namespace): policy_file_path = config.get( 'migrate.conditions', 'policy_file', - fallback = None + fallback=None ) if not policy_file_path: if args.policy_file: @@ -344,7 +385,7 @@ def run(self, config: configparser.ConfigParser, args: argparse.Namespace): entity_file_path = config.get( 'migrate.conditions', 'entity_file', - fallback = None + fallback=None ) if not entity_file_path: if args.entity_file: @@ -358,12 +399,12 @@ def run(self, config: configparser.ConfigParser, args: argparse.Namespace): use_local = config.getboolean( 'migrate.conditions', 'use_local', - fallback = args.use_local + fallback=args.use_local ) match_source_state = config.getboolean( 'migrate.conditions', 'match_source_state', - fallback = args.match_source_state + fallback=args.match_source_state ) cond_types = parse_condition_types_with_config(config, args) @@ -376,7 +417,9 @@ def run(self, config: configparser.ConfigParser, args: argparse.Namespace): policy_file_path, entity_file_path, base_config['source_account_id'], + 
base_config['source_region'], base_config['target_account_id'], + base_config['target_region'], base_config['source_api_key'], base_config['target_api_key'], cond_types, @@ -386,19 +429,17 @@ def run(self, config: configparser.ConfigParser, args: argparse.Namespace): logger.info('Completed alert condition migration.') -if __name__ == '__main__': - parser = create_argument_parser() +def main(): + parser = create_argument_parser() args = parser.parse_args() - - source_api_key = utils.ensure_source_api_key(args) - if not source_api_key: + src_api_key = utils.ensure_source_api_key(args) + if not src_api_key: utils.error_and_exit('source_api_key', 'ENV_SOURCE_API_KEY') - - target_api_key = utils.ensure_target_api_key(args) - if not target_api_key: + tgt_api_key = utils.ensure_target_api_key(args) + if not tgt_api_key: utils.error_and_exit('target_api_key', 'ENV_TARGET_API_KEY') - + cond_types = parse_condition_types(args) if len(cond_types) == 0: logger.error('At least one condition type must be specified currently supported ' + @@ -410,17 +451,23 @@ def run(self, config: configparser.ConfigParser, args: argparse.Namespace): if not policy_file and not entity_file: logger.error('Either a policy file or entity file must be specified.') sys.exit() - - print_args(source_api_key, target_api_key) - + src_region = utils.ensure_source_region(args) + tgt_region = utils.ensure_target_region(args) + print_args(src_api_key, src_region, tgt_api_key, tgt_region) migrate( policy_file, entity_file, args.source_account_id[0], + src_region, args.target_account_id[0], - source_api_key, - target_api_key, + tgt_region, + src_api_key, + tgt_api_key, cond_types, args.use_local, args.match_source_state - ) \ No newline at end of file + ) + + +if __name__ == '__main__': + main() diff --git a/migratemonitors.py b/migratemonitors.py index d951d2c..82e1989 100644 --- a/migratemonitors.py +++ b/migratemonitors.py @@ -31,10 +31,12 @@ def setup_params(): parser.add_argument('--fromFile', nargs=1, type=str, required=True, help='Path to file with monitor names') parser.add_argument('--sourceAccount', nargs=1, type=str, required=True, help='Source accountId local Store \ like db//monitors .') + parser.add_argument('--sourceRegion', type=str, nargs=1, required=False, help='sourceRegion us(default) or eu') parser.add_argument('--sourceApiKey', nargs=1, type=str, required=True, help='Source account API Key, \ ignored if useLocal is passed') parser.add_argument('--targetAccount', nargs=1, type=str, required=True, help='Target accountId or \ set environment variable ENV_SOURCE_API_KEY') + parser.add_argument('--targetRegion', type=str, nargs=1, required=False, help='targetRegion us(default) or eu') parser.add_argument('--targetApiKey', nargs=1, type=str, required=True, help='Target API Key, \ or set environment variable ENV_TARGET_API_KEY') parser.add_argument('--timeStamp', nargs=1, type=str, required=True, help='timeStamp to migrate') @@ -44,10 +46,14 @@ def setup_params(): # prints args and also sets the fetch_latest flag -def print_args(target_api_key): +def print_args(target_api_key, sourceRegion, targetRegion): global fetch_latest logger.info("Using fromFile : " + args.fromFile[0]) logger.info("Using sourceAccount : " + str(args.sourceAccount[0])) + if args.sourceRegion and len(args.sourceRegion) > 0: + logger.info("sourceRegion : " + args.sourceRegion[0]) + else: + logger.info("sourceRegion not passed : Defaulting to " + sourceRegion) if args.sourceApiKey: logger.info("Using sourceApiKey(ignored if --useLocal is passed) : " + 
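
migrateconditions.py now leans on utils.ensure_source_region / utils.ensure_target_region to fall back to us when no region flag is passed. A hedged sketch of what such a helper plausibly looks like; the real implementation lives in library/utils.py (extended by this patch) and may differ:

```
def ensure_source_region(args, default='us'):
    # Some scripts use dest='source_region', others the bare --sourceRegion
    # flag, so check both attribute spellings on the parsed namespace.
    region = getattr(args, 'source_region', None) or getattr(args, 'sourceRegion', None)
    if region and region[0].lower() in ('us', 'eu'):  # nargs=1 -> one-element list
        return region[0].lower()
    return default
```
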
diff --git a/migratemonitors.py b/migratemonitors.py
index d951d2c..82e1989 100644
--- a/migratemonitors.py
+++ b/migratemonitors.py
@@ -31,10 +31,12 @@ def setup_params():
     parser.add_argument('--fromFile', nargs=1, type=str, required=True, help='Path to file with monitor names')
     parser.add_argument('--sourceAccount', nargs=1, type=str, required=True, help='Source accountId local Store \
                        like db//monitors .')
+    parser.add_argument('--sourceRegion', type=str, nargs=1, required=False, help='sourceRegion us(default) or eu')
     parser.add_argument('--sourceApiKey', nargs=1, type=str, required=True, help='Source account API Key, \
                        ignored if useLocal is passed')
     parser.add_argument('--targetAccount', nargs=1, type=str, required=True, help='Target accountId or \
                        set environment variable ENV_SOURCE_API_KEY')
+    parser.add_argument('--targetRegion', type=str, nargs=1, required=False, help='targetRegion us(default) or eu')
     parser.add_argument('--targetApiKey', nargs=1, type=str, required=True, help='Target API Key, \
                        or set environment variable ENV_TARGET_API_KEY')
     parser.add_argument('--timeStamp', nargs=1, type=str, required=True, help='timeStamp to migrate')
@@ -44,10 +46,14 @@ def setup_params():
 
 
 # prints args and also sets the fetch_latest flag
-def print_args(target_api_key):
+def print_args(target_api_key, sourceRegion, targetRegion):
     global fetch_latest
     logger.info("Using fromFile : " + args.fromFile[0])
     logger.info("Using sourceAccount : " + str(args.sourceAccount[0]))
+    if args.sourceRegion and len(args.sourceRegion) > 0:
+        logger.info("sourceRegion : " + args.sourceRegion[0])
+    else:
+        logger.info("sourceRegion not passed : Defaulting to " + sourceRegion)
     if args.sourceApiKey:
         logger.info("Using sourceApiKey(ignored if --useLocal is passed) : " +
                     len(args.sourceApiKey[0][:-4])*"*"+args.sourceApiKey[0][-4:])
@@ -59,6 +65,10 @@ def print_args(target_api_key):
     else:
         logger.info("Default fetch_latest :" + str(fetch_latest))
     logger.info("Using targetAccount : " + str(args.targetAccount[0]))
+    if args.targetRegion and len(args.targetRegion) > 0:
+        logger.info("targetRegion : " + args.targetRegion[0])
+    else:
+        logger.info("targetRegion not passed : Defaulting to " + targetRegion)
     logger.info("Using targetApiKey : " + len(target_api_key[:-4])*"*"+target_api_key[-4:])
     logger.info("Using timeStamp : " + args.timeStamp[0])
 
@@ -75,7 +85,7 @@ def print_args(target_api_key):
     return target_api_key
 
 
-def migrate(all_monitors_json, src_account_id, src_api_key, tgt_acct_id, tgt_api_key):
+def migrate(all_monitors_json, src_api_key, src_region, tgt_api_key, tgt_region):
     monitor_status = {}
     scripted_monitors = []
     for monitor_json in all_monitors_json:
@@ -83,13 +93,13 @@ def migrate(all_monitors_json, src_account_id, src_api_key, tgt_acct_id, tgt_api
         monitor_name = monitor_json['definition']['name']
         source_monitor_id = monitor_json['definition']['id']
         if fetch_latest:
-            result = get_monitor(src_api_key, source_monitor_id)
+            result = get_monitor(src_api_key, source_monitor_id, src_region)
             if result['status'] != 200:
                 logger.error('Did not find monitor ' + source_monitor_id)
                 logger.error(result)
                 continue
             monitor_json['definition'] = result['monitor']
-        post_monitor_definition(tgt_api_key, monitor_name, monitor_json, monitor_status)
+        post_monitor_definition(tgt_api_key, monitor_name, monitor_json, monitor_status, tgt_region)
         if monitortypes.is_scripted(monitor_json['definition']):
             scripted_monitors.append(monitor_json)
     if fetch_latest:
@@ -105,11 +115,11 @@ def migrate(all_monitors_json, src_account_id, src_api_key, tgt_acct_id, tgt_api
     return monitor_status
 
 
-def migrate_monitors(from_file, src_acct, src_api_key, time_stamp, tgt_acct_id, target_api_key):
+def migrate_monitors(from_file, src_acct, src_region, src_api_key, time_stamp, tgt_acct_id, tgt_region, tgt_api_key):
     monitor_names = store.load_names(from_file)
     logger.debug(monitor_names)
     all_monitors_json = store.load_monitors(src_acct, time_stamp, monitor_names)
-    monitor_status = migrate(all_monitors_json, src_acct, src_api_key, tgt_acct_id, target_api_key)
+    monitor_status = migrate(all_monitors_json, src_api_key, src_region, tgt_api_key, tgt_region)
     logger.debug(monitor_status)
     file_name = utils.file_name_from(from_file)
     status_csv = src_acct + "_" + file_name + "_" + tgt_acct_id + ".csv"
@@ -123,9 +133,11 @@ def main():
     target_api_key = ensure_target_api_key()
     if not target_api_key:
         utils.error_and_exit('target_api_key', 'ENV_TARGET_API_KEY')
-    print_args(target_api_key)
-    migrate_monitors(args.fromFile[0], args.sourceAccount[0], args.sourceApiKey[0], args.timeStamp[0],
-                     args.targetAccount[0], target_api_key)
+    sourceRegion = utils.ensure_source_region(args)
+    targetRegion = utils.ensure_target_region(args)
+    print_args(target_api_key, sourceRegion, targetRegion)
+    migrate_monitors(args.fromFile[0], args.sourceAccount[0], sourceRegion, args.sourceApiKey[0], args.timeStamp[0],
+                     args.targetAccount[0], targetRegion, target_api_key)
 
 
 if __name__ == '__main__':
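
With the new signature, migrate() no longer takes account ids, since the monitor JSON comes from the local store; only the API keys and regions determine which hosts are called. A minimal driver for migrate_monitors() under assumed placeholder values (account ids, keys, file name, and timestamp are all illustrative):

```
# Assumed placeholders; migrate_monitors is the function defined above.
src_region = 'eu'   # monitors were fetched from an EU-region account
tgt_region = 'us'   # and are being recreated in a US-region account
migrate_monitors('output/monitors.csv', '1111111', src_region, 'SRC_USER_API_KEY',
                 '2021-05-27_14-00-00', '2222222', tgt_region, 'TGT_USER_API_KEY')
```
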
diff --git a/migratepolicies.py b/migratepolicies.py
index 3679d3a..ce2d638 100644
--- a/migratepolicies.py
+++ b/migratepolicies.py
@@ -2,6 +2,9 @@
 import configparser
 import os
 import sys
+
+from typing import List
+
 import library.localstore as store
 import library.status.alertstatus as askeys
 import library.migrationlogger as m_logger
@@ -20,12 +23,14 @@
 logger = m_logger.get_logger(os.path.basename(__file__))
 
 fetch_channels = True
 
+
 def create_argument_parser():
     parser = argparse.ArgumentParser(
         description='Migrate Alert Policies and channels'
     )
     return configure_parser(parser)
 
+
 def configure_parser(
     parser: argparse.ArgumentParser,
     is_standalone: bool = True
@@ -57,6 +62,15 @@ def configure_parser(
         help='Source accountId',
         dest='source_account_id'
     )
+    parser.add_argument(
+        '--sourceRegion',
+        '--source_region',
+        nargs=1,
+        type=str,
+        required=False,
+        help='Source Account Region us(default) or eu',
+        dest='source_region'
+    )
     parser.add_argument(
         '--sourceApiKey',
         '--source_api_key',
@@ -75,6 +89,15 @@ def configure_parser(
         help='Target accountId',
         dest='target_account_id'
     )
+    parser.add_argument(
+        '--targetRegion',
+        '--target_region',
+        nargs=1,
+        type=str,
+        required=False,
+        help='Target Account Region us(default) or eu',
+        dest='target_region'
+    )
     parser.add_argument(
         '--targetApiKey',
         '--target_api_key',
@@ -96,15 +119,23 @@ def configure_parser(
 
 
 # prints args and also sets the fetch_latest flag
-def print_args(src_api_key, tgt_api_key):
+def print_args(args, src_api_key, src_region, tgt_api_key, tgt_region):
     global fetch_channels
     if (args.policy_file):
         logger.info("Using fromFile : " + args.policy_file[0])
     if (args.entity_file):
         logger.info("Using fromFileEntities : " + args.entity_file[0])
     logger.info("Using sourceAccount : " + str(args.source_account_id[0]))
+    if args.source_region and len(args.source_region) > 0:
+        logger.info("sourceRegion : " + args.source_region[0])
+    else:
+        logger.info("sourceRegion not passed : Defaulting to " + src_region)
     logger.info("Using sourceApiKey : " + len(src_api_key[:-4])*"*"+src_api_key[-4:])
     logger.info("Using targetAccount : " + str(args.target_account_id[0]))
+    if args.target_region and len(args.target_region) > 0:
+        logger.info("targetRegion : " + args.target_region[0])
+    else:
+        logger.info("targetRegion not passed : Defaulting to " + tgt_region)
     logger.info("Using targetApiKey : " + len(tgt_api_key[:-4]) * "*" + tgt_api_key[-4:])
     if args.use_local:
         fetch_channels = False
@@ -117,15 +148,17 @@ def print_args(src_api_key, tgt_api_key):
 def type_name_key(channel):
     return channel['type'] + '-' + channel['name']
 
-def get_channels_by_type_name(api_key):
-    result = ac.get_channels(api_key)
+
+def get_channels_by_type_name(api_key, region):
+    result = ac.get_channels(api_key, region)
     all_target_channels = result[askeys.CHANNELS]
     target_channels_by_type_name = {}
     for target_channel in all_target_channels:
         target_channels_by_type_name[type_name_key(target_channel)] = target_channel
     return target_channels_by_type_name
 
-def update_notification_channels(tgt_api_key, source_policy, target_policy, loaded_src_channels,
+
+def update_notification_channels(tgt_api_key, tgt_region, source_policy, target_policy, loaded_src_channels,
                                  tgt_channels_by_type_name, all_alert_status):
     logger.info('Updating notification channels for ' + target_policy['name'])
     src_policy_id = str(source_policy['id'])
@@ -142,23 +175,29 @@ def update_notification_channels(tgt_api_key, source_policy, target_policy, load
     for src_channel in src_channels:
         src_channel_type_name = type_name_key(src_channel)
         if src_channel_type_name not in tgt_channels_by_type_name:
-            result = ac.create_channel(tgt_api_key, src_channel)
+            logger.info(src_channel)
+            result = ac.create_channel(tgt_api_key, src_channel, tgt_region)
             if result['status'] == 201:
                 tgt_type_name = type_name_key(result['channel'])
                 logger.info('Created channel : ' + tgt_type_name)
                 tgt_channels_by_type_name[tgt_type_name] = result['channel']
+            else:
+                logger.error(result)  # getting errors for channel of type user
         else:
             logger.info('Channel already existed : ' + src_channel_type_name)
-        target_channel_ids.append(tgt_channels_by_type_name[src_channel_type_name]['id'])
-        update_alert_status(all_alert_status, target_policy['name'], src_channel_type_name)
-    result = ac.put_channel_ids(tgt_api_key, target_policy['id'], target_channel_ids)
+        if src_channel_type_name in tgt_channels_by_type_name:
+            target_channel_ids.append(tgt_channels_by_type_name[src_channel_type_name]['id'])
+            update_alert_status(all_alert_status, target_policy['name'], src_channel_type_name)
+    result = ac.put_channel_ids(tgt_api_key, target_policy['id'], target_channel_ids, tgt_region)
     update_put_status(all_alert_status, result, target_policy)
 
+
 def update_put_status(all_alert_status, result, target_policy):
     all_alert_status[target_policy['name']][askeys.STATUS] = result['status']
     if 'channel_ids' in result:
         all_alert_status[target_policy['name']][askeys.PUT_CHANNELS] = result['channel_ids']
 
+
 def update_alert_status(all_alert_status, policy_name, src_channel_type_name):
     if askeys.CHANNELS in all_alert_status[policy_name]:
         all_alert_status[policy_name][askeys.CHANNELS] = all_alert_status[policy_name][askeys.CHANNELS] \
@@ -166,29 +205,30 @@ def update_alert_status(all_alert_status, policy_name, src_channel_type_name):
     else:
         all_alert_status[policy_name][askeys.CHANNELS] = src_channel_type_name
 
-def migrate_alert_policies(policy_names, src_account, src_api_key, tgt_account, tgt_api_key):
+
+def migrate_alert_policies(policy_names: List[str],
+                           src_account: int, src_api_key: str, src_region: str,
+                           tgt_account: int, tgt_api_key: str, tgt_region: str):
     logger.info('Alert migration started.')
     all_alert_status = {}
     if fetch_channels:
         logger.info('Fetching latest channel info and policy assignment. This may take a while.....')
-        loaded_src_channels = fetchchannels.get_channels_by_id_policy(src_api_key)
+        loaded_src_channels = fetchchannels.get_channels_by_id_policy(src_api_key, src_region)
     else:
         logger.info('Loading pre-fetched channel and policy assignment information')
         loaded_src_channels = store.load_alert_channels(src_account)
-    tgt_channels_by_type_name = get_channels_by_type_name(tgt_api_key)
-
+    tgt_channels_by_type_name = get_channels_by_type_name(tgt_api_key, tgt_region)
     logger.info('Migrating the following policies:')
     logger.info('%s' % policy_names)
-
     for policy_name in policy_names:
         all_alert_status[policy_name] = {}
-        result = ac.get_policy(src_api_key, policy_name)
+        result = ac.get_policy(src_api_key, policy_name, src_region)
         if not result['policyFound']:
             logger.error("Skipping as policy not found in source account " + policy_name)
             all_alert_status[policy_name][askeys.ERROR] = "Policy Not found in source account"
             continue
         src_policy = result['policy']
-        result = ac.get_policy(tgt_api_key, policy_name)
+        result = ac.get_policy(tgt_api_key, policy_name, tgt_region)
         if result['status'] in [200, 304] and result['policyFound']:
             logger.info('Policy exists : ' + policy_name)
             all_alert_status[policy_name] = {askeys.POLICY_EXISTED: True}
@@ -196,25 +236,29 @@ def migrate_alert_policies(policy_names, src_account, src_api_key, tgt_account,
         else:
             logger.info('Creating : ' + policy_name)
             all_alert_status[policy_name] = {askeys.POLICY_EXISTED: False}
-            result = ac.create_alert_policy(tgt_api_key, src_policy)
+            result = ac.create_alert_policy(tgt_api_key, src_policy, tgt_region)
             update_create_status(all_alert_status, policy_name, result)
             tgt_policy = result['policy']
-        update_notification_channels(tgt_api_key, src_policy, tgt_policy, loaded_src_channels,
+        update_notification_channels(tgt_api_key, tgt_region, src_policy, tgt_policy, loaded_src_channels,
                                      tgt_channels_by_type_name, all_alert_status)
     logger.info('Alert migration complete.')
     return all_alert_status
 
+
 def update_create_status(all_alert_status, policy_name, result):
     all_alert_status[policy_name][askeys.STATUS] = result['status']
     all_alert_status[policy_name][askeys.POLICY_CREATED] = result['entityCreated']
     if 'error' in result:
         all_alert_status[policy_name][askeys.ERROR] = result['error']
 
+
 def migrate(
     policy_file_path: str,
     entity_file_path: str,
     source_acct_id: int,
+    source_region: str,
     target_acct_id: int,
+    target_region: str,
     source_api_key: str,
     target_api_key: str,
     use_local: bool = False
@@ -223,6 +267,7 @@ def migrate(
         policy_file_path,
         entity_file_path,
         source_acct_id,
+        source_region,
         source_api_key,
         use_local
     )
@@ -231,8 +276,10 @@ def migrate(
         policy_names,
         source_acct_id,
         source_api_key,
+        source_region,
         target_acct_id,
-        target_api_key
+        target_api_key,
+        target_region
     )
 
     status_file = ac.get_alert_status_file_name(
@@ -245,6 +292,7 @@ def migrate(
 
     return status_file
 
+
 class MigratePoliciesCommand:
     def configure_parser(self, migrate_subparsers, global_options_parser):
         # Create the parser for the "policies" command
@@ -269,7 +317,7 @@ def run(self, config: configparser.ConfigParser, args: argparse.Namespace):
         policy_file_path = config.get(
             'migrate.policies',
             'policy_file',
-            fallback = None
+            fallback=None
         )
         if not policy_file_path:
             if args.policy_file:
@@ -278,7 +326,7 @@ def run(self, config: configparser.ConfigParser, args: argparse.Namespace):
         entity_file_path = config.get(
             'migrate.policies',
             'entity_file',
-            fallback = None
+            fallback=None
         )
         if not entity_file_path:
             if args.entity_file:
@@ -292,42 +340,40 @@ def run(self, config: configparser.ConfigParser, args: argparse.Namespace):
         use_local = config.getboolean(
             'migrate.conditions',
             'use_local',
-            fallback = args.use_local
+            fallback=args.use_local
         )
 
         migrate(
             policy_file_path,
             entity_file_path,
             base_config['source_account_id'],
+            base_config['source_region'],
             base_config['target_account_id'],
+            base_config['target_region'],
             base_config['source_api_key'],
             base_config['target_api_key'],
             use_local
         )
-
         logger.info('Completed alert policy migration.')
 
 
-if __name__ == '__main__':
-    parser = create_argument_parser()
+def main():
+    parser = create_argument_parser()
     args = parser.parse_args()
-
     source_api_key = utils.ensure_source_api_key(args)
     if not source_api_key:
         utils.error_and_exit('source_api_key', 'ENV_SOURCE_API_KEY')
-
     target_api_key = utils.ensure_target_api_key(args)
    if not target_api_key:
         utils.error_and_exit('target_api_key', 'ENV_TARGET_API_KEY')
-
     policy_file = args.policy_file[0] if args.policy_file else None
     entity_file = args.entity_file[0] if args.entity_file else None
     if not policy_file and not entity_file:
         logger.error('Either a policy file or entity file must be specified.')
         sys.exit()
-
-    print_args(source_api_key, target_api_key)
-
+    sourceRegion = utils.ensure_source_region(args)
+    targetRegion = utils.ensure_target_region(args)
+    print_args(args, source_api_key, sourceRegion, target_api_key, targetRegion)
     migrate(
         policy_file,
         entity_file,
@@ -337,3 +383,7 @@ def run(self, config: configparser.ConfigParser, args: argparse.Namespace):
         target_api_key,
         args.use_local
     )
+
+
+if __name__ == '__main__':
+    main()
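
The membership guard added to update_notification_channels is what keeps a failed channel creation from derailing the run: when ac.create_channel returns a non-201 status (the inline comment notes this happens for channels of type user), the channel never lands in tgt_channels_by_type_name, and the old unconditional lookup raised KeyError. A toy illustration with a hypothetical key:

```
# Toy illustration: creation failed, so nothing was registered under the key.
tgt_channels_by_type_name = {}
key = 'user-jane@example.com'   # hypothetical type_name_key() result

target_channel_ids = []
if key in tgt_channels_by_type_name:   # new guarded path: skip quietly
    target_channel_ids.append(tgt_channels_by_type_name[key]['id'])
# The old unconditional lookup would have raised KeyError here:
# tgt_channels_by_type_name[key]['id']
```
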
""" @@ -124,6 +126,7 @@ def main(): logger.info('Done.') + if __name__ == "__main__": main() diff --git a/updatemonitors.py b/updatemonitors.py index 12dd9b6..411375f 100644 --- a/updatemonitors.py +++ b/updatemonitors.py @@ -11,17 +11,18 @@ logger = migrationlogger.get_logger(os.path.basename(__file__)) -def setup_params(): +def setup_params(parser): parser.add_argument('--fromFile', nargs=1, required=True, help='Path to file with monitor names, one per line') parser.add_argument('--targetApiKey', nargs=1, required=False, help='API Key for the account') parser.add_argument('--targetAccount', nargs=1, required=True, help='Target account') + parser.add_argument('--targetRegion', type=str, nargs=1, required=False, help='targetRegion us(default) or eu') parser.add_argument('--timeStamp', nargs=1, required=True, help='Timestamp of the pre-fetched monitors') parser.add_argument('--renamePrefix', nargs=1, required=False, help='Pass prefix to rename monitors') parser.add_argument('--disable', dest='disable', required=False, action='store_true', help='Pass --disable to disable the monitors') -def ensure_target_api_key(): +def ensure_target_api_key(args): if args.targetApiKey: api_key = args.targetApiKey[0] else: @@ -33,10 +34,14 @@ def ensure_target_api_key(): return api_key -def print_args(): +def print_args(args, targetRegion): logger.info("Using fromFile : " + args.fromFile[0]) logger.info("Using targetApiKey : " + args.targetApiKey[0]) logger.info("Using targetAccount : " + str(args.targetAccount[0])) + if args.targetRegion and len(args.targetRegion) > 0: + logger.info("targetRegion : " + args.targetRegion[0]) + else: + logger.info("targetRegion not passed : Defaulting to " + targetRegion) logger.info("Using timeStamp : " + args.timeStamp[0]) if args.renamePrefix: logger.info("Monitors will be prefixed with : " + args.renamePrefix[0]) @@ -47,7 +52,7 @@ def print_args(): logger.info("Disable Monitors : " + str(args.disable)) -def update_monitors(api_key, account_id, from_file, time_stamp, prefix, disable_flag): +def update_monitors(api_key, account_id, from_file, time_stamp, prefix, disable_flag, tgt_region): update_list = localstore.load_names(from_file) all_monitors = localstore.load_monitors(account_id, time_stamp, update_list) all_monitor_status = {} @@ -63,10 +68,10 @@ def update_monitors(api_key, account_id, from_file, time_stamp, prefix, disable_ if disable_flag: update_json['status'] = 'DISABLED' all_monitor_status[monitor_name][updatestatus.UPDATED_STATUS] = 'DISABLED' - result = monitorsclient.update(api_key, monitor_id, update_json, monitor_name) + result = monitorsclient.update(api_key, monitor_id, update_json, monitor_name, tgt_region) update_status(all_monitor_status, monitor_name, result) update_status_csv = str(account_id) + "_" + utils.file_name_from(from_file) + "_updated_monitors.csv" - localstore.store_update_monitor_status_csv(update_status_csv, all_monitor_status) + localstore.save_status_csv(update_status_csv, all_monitor_status, updatestatus) def update_status(all_monitor_status, monitor_name, result): @@ -77,20 +82,25 @@ def update_status(all_monitor_status, monitor_name, result): all_monitor_status[monitor_name][updatestatus.UPDATED_JSON] = result['updatedEntity'] -if __name__ == '__main__': +def main(): parser = argparse.ArgumentParser(description='Update Synthetic Monitors') - setup_params() + setup_params(parser) args = parser.parse_args() if not args.renamePrefix and not args.disable: logger.error("Missing update directive: Either --renamePrefix or --disable flag 
diff --git a/updatemonitors.py b/updatemonitors.py
index 12dd9b6..411375f 100644
--- a/updatemonitors.py
+++ b/updatemonitors.py
@@ -11,17 +11,18 @@
 logger = migrationlogger.get_logger(os.path.basename(__file__))
 
 
-def setup_params():
+def setup_params(parser):
     parser.add_argument('--fromFile', nargs=1, required=True, help='Path to file with monitor names, one per line')
     parser.add_argument('--targetApiKey', nargs=1, required=False, help='API Key for the account')
     parser.add_argument('--targetAccount', nargs=1, required=True, help='Target account')
+    parser.add_argument('--targetRegion', type=str, nargs=1, required=False, help='targetRegion us(default) or eu')
     parser.add_argument('--timeStamp', nargs=1, required=True, help='Timestamp of the pre-fetched monitors')
     parser.add_argument('--renamePrefix', nargs=1, required=False, help='Pass prefix to rename monitors')
     parser.add_argument('--disable', dest='disable', required=False, action='store_true',
                         help='Pass --disable to disable the monitors')
 
 
-def ensure_target_api_key():
+def ensure_target_api_key(args):
     if args.targetApiKey:
         api_key = args.targetApiKey[0]
     else:
@@ -33,10 +34,14 @@ def ensure_target_api_key():
     return api_key
 
 
-def print_args():
+def print_args(args, targetRegion):
     logger.info("Using fromFile : " + args.fromFile[0])
     logger.info("Using targetApiKey : " + args.targetApiKey[0])
     logger.info("Using targetAccount : " + str(args.targetAccount[0]))
+    if args.targetRegion and len(args.targetRegion) > 0:
+        logger.info("targetRegion : " + args.targetRegion[0])
+    else:
+        logger.info("targetRegion not passed : Defaulting to " + targetRegion)
     logger.info("Using timeStamp : " + args.timeStamp[0])
     if args.renamePrefix:
         logger.info("Monitors will be prefixed with : " + args.renamePrefix[0])
@@ -47,7 +52,7 @@ def print_args():
     logger.info("Disable Monitors : " + str(args.disable))
 
 
-def update_monitors(api_key, account_id, from_file, time_stamp, prefix, disable_flag):
+def update_monitors(api_key, account_id, from_file, time_stamp, prefix, disable_flag, tgt_region):
     update_list = localstore.load_names(from_file)
     all_monitors = localstore.load_monitors(account_id, time_stamp, update_list)
     all_monitor_status = {}
@@ -63,10 +68,10 @@ def update_monitors(api_key, account_id, from_file, time_stamp, prefix, disable_
         if disable_flag:
             update_json['status'] = 'DISABLED'
             all_monitor_status[monitor_name][updatestatus.UPDATED_STATUS] = 'DISABLED'
-        result = monitorsclient.update(api_key, monitor_id, update_json, monitor_name)
+        result = monitorsclient.update(api_key, monitor_id, update_json, monitor_name, tgt_region)
         update_status(all_monitor_status, monitor_name, result)
     update_status_csv = str(account_id) + "_" + utils.file_name_from(from_file) + "_updated_monitors.csv"
-    localstore.store_update_monitor_status_csv(update_status_csv, all_monitor_status)
+    localstore.save_status_csv(update_status_csv, all_monitor_status, updatestatus)
 
 
 def update_status(all_monitor_status, monitor_name, result):
@@ -77,20 +82,25 @@ def update_status(all_monitor_status, monitor_name, result):
         all_monitor_status[monitor_name][updatestatus.UPDATED_JSON] = result['updatedEntity']
 
 
-if __name__ == '__main__':
+def main():
     parser = argparse.ArgumentParser(description='Update Synthetic Monitors')
-    setup_params()
+    setup_params(parser)
     args = parser.parse_args()
     if not args.renamePrefix and not args.disable:
         logger.error("Missing update directive: Either --renamePrefix or --disable flag or both must be passed")
         sys.exit(1)
-    target_api_key = ensure_target_api_key()
+    target_api_key = ensure_target_api_key(args)
     rename_prefix = ''
     disable = False
     if args.renamePrefix:
         rename_prefix = args.renamePrefix[0]
     if args.disable:
         disable = True
-    print_args()
+    targetRegion = utils.ensure_target_region(args)
+    print_args(args, targetRegion)
     update_monitors(target_api_key, args.targetAccount[0], args.fromFile[0], args.timeStamp[0],
-                    rename_prefix, disable)
+                    rename_prefix, disable, targetRegion)
+
+
+if __name__ == '__main__':
+    main()
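
The switch from store_update_monitor_status_csv to the generic localstore.save_status_csv(name, status, keys) suggests a helper that derives its CSV columns from the status-keys module it is handed. A speculative sketch only; the real implementation in library/localstore.py may differ in output path and column handling:

```
import csv
import os


def save_status_csv(csv_name, all_status, status_module, out_dir='output'):
    # Derive the CSV columns from the status module's uppercase string constants.
    columns = sorted(v for k, v in vars(status_module).items()
                     if k.isupper() and isinstance(v, str))
    os.makedirs(out_dir, exist_ok=True)
    with open(os.path.join(out_dir, csv_name), 'w', newline='') as status_file:
        writer = csv.writer(status_file)
        writer.writerow(['name'] + columns)
        for name, status in all_status.items():
            writer.writerow([name] + [status.get(column, '') for column in columns])
```
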