ATLAS-1407: improve LOG statement performance
Signed-off-by: Madhan Neethiraj <[email protected]>
apoorvnaik authored and mneethiraj committed Dec 21, 2016
1 parent 3725dcf commit 40e639e
Showing 42 changed files with 193 additions and 217 deletions.
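The change applied throughout this commit is mechanical: string-concatenated log messages are rewritten into SLF4J's parameterized form. A minimal sketch of why this matters, using an illustrative class and values rather than code from the commit:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogPerfSketch {
    private static final Logger LOG = LoggerFactory.getLogger(LogPerfSketch.class);

    public static void main(String[] args) {
        Object entity = "hive_table=customers";

        // Before: '+' builds the full message eagerly -- a StringBuilder is
        // allocated and entity.toString() runs even when DEBUG is disabled.
        LOG.debug("==> AtlasService.onAdd(" + entity + ")");

        // After: SLF4J substitutes '{}' only after the level check passes, so
        // a disabled DEBUG statement costs little more than a boolean test.
        LOG.debug("==> AtlasService.onAdd({})", entity);
    }
}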
@@ -112,7 +112,7 @@ public void destroy() throws FalconException {
@Override
public void onAdd(Entity entity) throws FalconException {
if (LOG.isDebugEnabled()) {
LOG.debug("==> AtlasService.onAdd(" + entity + ")");
LOG.debug("==> AtlasService.onAdd({})", entity);
}

try {
@@ -123,14 +123,14 @@ public void onAdd(Entity entity) throws FalconException {
}

if (LOG.isDebugEnabled()) {
LOG.debug("<== AtlasService.onAdd(" + entity + ")");
LOG.debug("<== AtlasService.onAdd({})", entity);
}
}

@Override
public void onRemove(Entity entity) throws FalconException {
if (LOG.isDebugEnabled()) {
LOG.debug("==> AtlasService.onRemove(" + entity + ")");
LOG.debug("==> AtlasService.onRemove({})", entity);
}

try {
@@ -141,14 +141,14 @@ public void onRemove(Entity entity) throws FalconException {
}

if (LOG.isDebugEnabled()) {
LOG.debug("<== AtlasService.onRemove(" + entity + ")");
LOG.debug("<== AtlasService.onRemove({})", entity);
}
}

@Override
public void onChange(Entity entity, Entity entity1) throws FalconException {
if (LOG.isDebugEnabled()) {
LOG.debug("==> AtlasService.onChange(" + entity + ", " + entity1 + ")");
LOG.debug("==> AtlasService.onChange({}, {})", entity, entity1);
}

try {
@@ -159,14 +159,14 @@ public void onChange(Entity entity, Entity entity1) throws FalconException {
}

if (LOG.isDebugEnabled()) {
LOG.debug("<== AtlasService.onChange(" + entity + ", " + entity1 + ")");
LOG.debug("<== AtlasService.onChange({}, {})", entity, entity1);
}
}

@Override
public void onReload(Entity entity) throws FalconException {
if (LOG.isDebugEnabled()) {
LOG.debug("==> AtlasService.onReload(" + entity + ")");
LOG.debug("==> AtlasService.onReload({})", entity);
}

try {
@@ -177,7 +177,7 @@ public void onReload(Entity entity) throws FalconException {
}

if (LOG.isDebugEnabled()) {
LOG.debug("<== AtlasService.onReload(" + entity + ")");
LOG.debug("<== AtlasService.onReload({})", entity);
}
}

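Note that the isDebugEnabled() guards around these entry/exit traces are kept even though '{}' formatting is already lazy: parameterized logging defers formatting, not argument evaluation. A sketch of where the guard still pays off -- expensiveSnapshot() is a hypothetical method, not Atlas code:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GuardSketch {
    private static final Logger LOG = LoggerFactory.getLogger(GuardSketch.class);

    public static void main(String[] args) {
        // Without a guard, expensiveSnapshot() still executes when DEBUG is
        // off; only the formatting of the message is deferred by '{}'.
        LOG.debug("state: {}", expensiveSnapshot());

        // The guard short-circuits both the argument computation and the call.
        if (LOG.isDebugEnabled()) {
            LOG.debug("state: {}", expensiveSnapshot());
        }
    }

    private static String expensiveSnapshot() {
        return "imagine a large object graph serialized here";
    }
}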
@@ -142,7 +142,7 @@ public void run() {
});
}
} catch (Throwable t) {
LOG.warn("Error in processing data " + data, t);
LOG.warn("Error in processing data {}", data, t);
}
}

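This hunk leans on an SLF4J convention: when the last argument is a Throwable and has no matching placeholder, it is treated as the exception and logged with its stack trace. A small self-contained sketch, with illustrative values:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ThrowableArgSketch {
    private static final Logger LOG = LoggerFactory.getLogger(ThrowableArgSketch.class);

    public static void main(String[] args) {
        Object data = "record-42";
        try {
            throw new IllegalStateException("boom");
        } catch (Throwable t) {
            // One '{}' but two arguments: 'data' fills the placeholder, and
            // the trailing Throwable is logged with its full stack trace.
            LOG.warn("Error in processing data {}", data, t);
        }
    }
}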
@@ -343,7 +343,7 @@ protected void waitFor(int timeout, Predicate predicate) throws Exception {
if (System.currentTimeMillis() >= mustEnd) {
fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e);
}
LOG.debug("Waiting up to " + (mustEnd - System.currentTimeMillis()) + " msec as assertion failed", e);
LOG.debug("Waiting up to {} msec as assertion failed", mustEnd - System.currentTimeMillis(), e);
Thread.sleep(400);
}
}
@@ -44,7 +44,7 @@ public HiveHook() {
@Override
public void run(final HookContext hookContext) throws Exception {
if (LOG.isDebugEnabled()) {
LOG.debug("==> HiveHook.run(" + hookContext + ")");
LOG.debug("==> HiveHook.run({})", hookContext);
}

try {
@@ -55,7 +55,7 @@ public void run(final HookContext hookContext) throws Exception {
}

if (LOG.isDebugEnabled()) {
LOG.debug("<== HiveHook.run(" + hookContext + ")");
LOG.debug("<== HiveHook.run({})", hookContext);
}
}

@@ -179,7 +179,7 @@ private Referenceable registerDatabase(String databaseName) throws Exception {
}

private Referenceable createOrUpdateDBInstance(Database hiveDB, Referenceable dbRef) {
LOG.info("Importing objects from databaseName : " + hiveDB.getName());
LOG.info("Importing objects from databaseName : {}", hiveDB.getName());

if (dbRef == null) {
dbRef = new Referenceable(HiveDataTypes.HIVE_DB.getName());
@@ -206,12 +206,12 @@ private Referenceable createOrUpdateDBInstance(Database hiveDB, Referenceable db
*/
private Referenceable registerInstance(Referenceable referenceable) throws Exception {
String typeName = referenceable.getTypeName();
LOG.debug("creating instance of type " + typeName);
LOG.debug("creating instance of type {}", typeName);

String entityJSON = InstanceSerialization.toJson(referenceable, true);
LOG.debug("Submitting new entity {} = {}", referenceable.getTypeName(), entityJSON);
List<String> guids = getAtlasClient().createEntity(entityJSON);
LOG.debug("created instance for type " + typeName + ", guid: " + guids);
LOG.debug("created instance for type {}, guid: {}", typeName, guids);

return new Referenceable(guids.get(guids.size() - 1), referenceable.getTypeName(), null);
}
@@ -497,9 +497,9 @@ public static String getStorageDescQFName(String entityQualifiedName) {
private Referenceable registerTable(Referenceable dbReference, Table table) throws Exception {
String dbName = table.getDbName();
String tableName = table.getTableName();
LOG.info("Attempting to register table [" + tableName + "]");
LOG.info("Attempting to register table [{}]", tableName);
Referenceable tableReference = getTableReference(table);
LOG.info("Found result " + tableReference);
LOG.info("Found result {}", tableReference);
if (tableReference == null) {
tableReference = createTableInstance(dbReference, table);
tableReference = registerInstance(tableReference);
@@ -514,7 +514,7 @@ private Referenceable registerTable(Referenceable dbReference, Table table) thro

private void updateInstance(Referenceable referenceable) throws AtlasServiceException {
String typeName = referenceable.getTypeName();
LOG.debug("updating instance of type " + typeName);
LOG.debug("updating instance of type {}", typeName);

String entityJSON = InstanceSerialization.toJson(referenceable, true);
LOG.debug("Updating entity {} = {}", referenceable.getTypeName(), entityJSON);
@@ -524,13 +524,13 @@ private void updateInstance(Referenceable referenceable) throws AtlasServiceExce

public Referenceable fillStorageDesc(StorageDescriptor storageDesc, String tableQualifiedName,
String sdQualifiedName, Id tableId) throws Exception {
LOG.debug("Filling storage descriptor information for " + storageDesc);
LOG.debug("Filling storage descriptor information for {}", storageDesc);

Referenceable sdReferenceable = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.getName());
sdReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, sdQualifiedName);

SerDeInfo serdeInfo = storageDesc.getSerdeInfo();
LOG.debug("serdeInfo = " + serdeInfo);
LOG.debug("serdeInfo = {}", serdeInfo);
// SkewedInfo skewedInfo = storageDesc.getSkewedInfo();

String serdeInfoName = HiveDataTypes.HIVE_SERDE.getName();
@@ -594,7 +594,7 @@ public List<Referenceable> getColumns(List<FieldSchema> schemaList, Referenceabl
List<Referenceable> colList = new ArrayList<>();
int columnPosition = 0;
for (FieldSchema fs : schemaList) {
LOG.debug("Processing field " + fs);
LOG.debug("Processing field {}", fs);
Referenceable colReferenceable = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName());
colReferenceable.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
getColumnQualifiedName((String) tableReference.get(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME), fs.getName()));
@@ -201,7 +201,7 @@ protected void waitFor(int timeout, Predicate predicate) throws Exception {
if (System.currentTimeMillis() >= mustEnd) {
fail("Assertions failed. Failing after waiting for timeout " + timeout + " msecs", e);
}
LOG.debug("Waiting up to " + (mustEnd - System.currentTimeMillis()) + " msec as assertion failed", e);
LOG.debug("Waiting up to {} msec as assertion failed", mustEnd - System.currentTimeMillis(), e);
Thread.sleep(5000);
}
}
@@ -43,7 +43,7 @@ public SqoopHook() {
@Override
public void publish(SqoopJobDataPublisher.Data data) throws Exception {
if (LOG.isDebugEnabled()) {
LOG.debug("==> SqoopHook.run(" + data + ")");
LOG.debug("==> SqoopHook.run({})", data);
}

try {
@@ -54,7 +54,7 @@ public void publish(SqoopJobDataPublisher.Data data) throws Exception {
}

if (LOG.isDebugEnabled()) {
LOG.debug("<== SqoopHook.run(" + data + ")");
LOG.debug("<== SqoopHook.run({})", data);
}
}

@@ -49,7 +49,7 @@ public StormAtlasHook() {
public void notify(TopologyInfo topologyInfo, Map stormConf, StormTopology stormTopology)
throws IllegalAccessException {
if (LOG.isDebugEnabled()) {
LOG.debug("==> StormAtlasHook.notify(" + topologyInfo + ", " + stormConf + ", " + stormTopology + ")");
LOG.debug("==> StormAtlasHook.notify({}, {}, {})", topologyInfo, stormConf, stormTopology);
}

try {
@@ -60,7 +60,7 @@ public void notify(TopologyInfo topologyInfo, Map stormConf, StormTopology storm
}

if (LOG.isDebugEnabled()) {
LOG.debug("<== StormAtlasHook.notify(" + topologyInfo + ", " + stormConf + ", " + stormTopology + ")");
LOG.debug("<== StormAtlasHook.notify({}, {}, {})", topologyInfo, stormConf, stormTopology);
}
}

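One detail worth knowing: SLF4J provides dedicated debug(String, Object) and debug(String, Object, Object) overloads, so one- and two-argument calls avoid any array allocation; calls with three or more arguments, like the StormAtlasHook trace above, resolve to the Object... varargs overload, which collects the arguments into an Object[] at the call site (modulo JIT optimizations). An illustrative sketch:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class OverloadSketch {
    private static final Logger LOG = LoggerFactory.getLogger(OverloadSketch.class);

    public static void main(String[] args) {
        Object a = 1, b = 2, c = 3;

        // Two arguments: resolved to debug(String, Object, Object).
        LOG.debug("pair: {} {}", a, b);

        // Three arguments: resolved to debug(String, Object...), allocating a
        // varargs array per call -- one more reason the guards above are kept.
        LOG.debug("triple: {} {} {}", a, b, c);
    }
}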
@@ -62,15 +62,15 @@ public static AtlasAuthorizer getAtlasAuthorizer() throws AtlasAuthorizationExce
}

if (isDebugEnabled) {
LOG.debug("Initializing Authorizer :: " + authorizerClass);
LOG.debug("Initializing Authorizer :: {}", authorizerClass);
}
try {
Class authorizerMetaObject = Class.forName(authorizerClass);
if (authorizerMetaObject != null) {
INSTANCE = (AtlasAuthorizer) authorizerMetaObject.newInstance();
}
} catch (Exception e) {
LOG.error("Error while creating authorizer of type '" + authorizerClass + "'", e);
LOG.error("Error while creating authorizer of type '{}", authorizerClass, e);
throw new AtlasAuthorizationException("Error while creating authorizer of type '"
+ authorizerClass + "'", e);
}
@@ -36,7 +36,7 @@ public class AtlasAuthorizationUtils {

public static String getApi(String contextPath) {
if (isDebugEnabled) {
LOG.debug("==> getApi from " + contextPath);
LOG.debug("==> getApi from {}", contextPath);
}
if (contextPath.startsWith(BASE_URL)) {
contextPath = contextPath.substring(BASE_URL.length());
@@ -74,14 +74,14 @@ public static AtlasActionTypes getAtlasAction(String method) {
break;
default:
if (isDebugEnabled) {
LOG.debug("getAtlasAction(): Invalid HTTP method '" + method + "'");
LOG.debug("getAtlasAction(): Invalid HTTP method '{}", method);
}
break;
}

if (isDebugEnabled) {
LOG.debug("<== AtlasAuthorizationFilter getAtlasAction HTTP Method " + method + " mapped to AtlasAction : "
+ action);
LOG.debug("<== AtlasAuthorizationFilter getAtlasAction HTTP Method {} mapped to AtlasAction : {}",
method, action);
}
return action;
}
@@ -96,15 +96,15 @@ public static AtlasActionTypes getAtlasAction(String method) {
* entities,lineage and discovery apis are mapped with AtlasResourceTypes.ENTITY eg :- /api/atlas/lineage/hive/table/*
* /api/atlas/entities/{guid}* /api/atlas/discovery/*
*
* taxonomy API are also mapped to AtlasResourceTypes.TAXONOMY & AtlasResourceTypes.ENTITY and its terms APIs have
* added AtlasResourceTypes.TERM associations.
*
*
* unprotected types are mapped with AtlasResourceTypes.UNKNOWN, access to these are allowed.
*/
public static Set<AtlasResourceTypes> getAtlasResourceType(String contextPath) {
Set<AtlasResourceTypes> resourceTypes = new HashSet<>();
if (isDebugEnabled) {
LOG.debug("==> getAtlasResourceType for " + contextPath);
LOG.debug("==> getAtlasResourceType for {}", contextPath);
}
String api = getApi(contextPath);
if (api.startsWith("types")) {
Expand All @@ -125,13 +125,13 @@ public static Set<AtlasResourceTypes> getAtlasResourceType(String contextPath) {
resourceTypes.add(AtlasResourceTypes.TERM);
}
} else {
LOG.error("Unable to find Atlas Resource corresponding to : " + api + "\nSetting "
+ AtlasResourceTypes.UNKNOWN.name());
LOG.error("Unable to find Atlas Resource corresponding to : {}\nSetting {}"
, api, AtlasResourceTypes.UNKNOWN.name());
resourceTypes.add(AtlasResourceTypes.UNKNOWN);
}

if (isDebugEnabled) {
LOG.debug("<== Returning AtlasResource/s " + resourceTypes + " for api " + api);
LOG.debug("<== Returning AtlasResources {} for api {}", resourceTypes, api);
}
return resourceTypes;
}
@@ -71,7 +71,7 @@ private List<AtlasActionTypes> getListOfAutorities(String auth) {

default:
if (LOG.isErrorEnabled()) {
LOG.error("Invalid action: '" + access + "'");
LOG.error("Invalid action: '{}'", access);
}
break;
}
@@ -108,7 +108,7 @@ private PolicyDef parsePolicy(String data) {
String[] props = data.split(";;");

if (props.length < RESOURCE_INDEX) {
LOG.warn("skipping invalid policy line: " + data);
LOG.warn("skipping invalid policy line: {}", data);
} else {
def = new PolicyDef();
def.setPolicyName(props[POLICYNAME]);
@@ -36,15 +36,14 @@ public class PolicyUtil {
public Map<String, Map<AtlasResourceTypes, List<String>>> createPermissionMap(List<PolicyDef> policyDefList,
AtlasActionTypes permissionType, SimpleAtlasAuthorizer.AtlasAccessorTypes principalType) {
if (isDebugEnabled) {
LOG.debug("==> PolicyUtil createPermissionMap" + "\nCreating Permission Map for :: " + permissionType
+ " & " + principalType);
LOG.debug("==> PolicyUtil createPermissionMap\nCreating Permission Map for :: {} & {}", permissionType, principalType);
}
Map<String, Map<AtlasResourceTypes, List<String>>> userReadMap =
new HashMap<>();

// Iterate over the list of policies to create map
for (PolicyDef policyDef : policyDefList) {
LOG.info("Processing policy def : " + policyDef);
LOG.info("Processing policy def : {}", policyDef);
Map<String, List<AtlasActionTypes>> principalMap =
principalType.equals(SimpleAtlasAuthorizer.AtlasAccessorTypes.USER) ? policyDef.getUsers() : policyDef
.getGroups();
@@ -61,7 +60,7 @@ public Map<String, Map<AtlasResourceTypes, List<String>>> createPermissionMap(Li
// If its not added then create a new resource list
if (userResourceList == null) {
if (isDebugEnabled) {
LOG.debug("Resource list not found for " + username + ", creating it");
LOG.debug("Resource list not found for {}, creating it", username);
}
userResourceList = new HashMap<>();
}
@@ -89,11 +88,11 @@ public Map<String, Map<AtlasResourceTypes, List<String>>> createPermissionMap(Li
userResourceList.put(type, resourceList);
}
userReadMap.put(username, userResourceList);
LOG.info("userReadMap " + userReadMap);
LOG.info("userReadMap {}", userReadMap);
}
}
if (isDebugEnabled) {
LOG.debug("Returning Map for " + principalType + " :: " + userReadMap);
LOG.debug("Returning Map for {} :: {}", principalType, userReadMap);
LOG.debug("<== PolicyUtil createPermissionMap");
}
return userReadMap;
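A crude way to observe the effect of the whole commit -- not a rigorous benchmark (no JIT warm-up or dead-code control), and it assumes only slf4j-api is on the classpath so SLF4J falls back to its no-op logger with DEBUG disabled:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ConcatVsParamTiming {
    private static final Logger LOG = LoggerFactory.getLogger(ConcatVsParamTiming.class);

    public static void main(String[] args) {
        Object entity = new Object();
        int n = 1_000_000;

        long t0 = System.nanoTime();
        for (int i = 0; i < n; i++) {
            LOG.debug("==> onAdd(" + entity + ")"); // builds a String each pass
        }
        long t1 = System.nanoTime();
        for (int i = 0; i < n; i++) {
            LOG.debug("==> onAdd({})", entity); // no formatting while DEBUG is off
        }
        long t2 = System.nanoTime();

        System.out.printf("concatenation: %d ms, parameterized: %d ms%n",
                (t1 - t0) / 1_000_000, (t2 - t1) / 1_000_000);
    }
}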