Skip to content

Commit

Permalink
optimize code
Browse files Browse the repository at this point in the history
  • Loading branch information
孙永强 committed Jan 6, 2025
1 parent 60b7618 commit 42b04be
Show file tree
Hide file tree
Showing 2 changed files with 27 additions and 28 deletions.
38 changes: 20 additions & 18 deletions events/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,25 +14,27 @@
METRIC_CHANNEL_NAME = "metic-channel"

### metrics decorator
def handle_metric_decorator(metric_name):
    """Decorator factory: time each call of the wrapped function and publish
    the duration as a metric over Redis pub/sub.

    The published payload carries ``metric_name``, the instance/node identity
    and a collection timestamp; ``metric_value`` is the call duration in
    seconds, rounded to 3 decimals.  When ``ENABLE_METRIC`` is falsy the
    wrapped function runs untouched and nothing is published.

    :param metric_name: name under which the timing metric is published.
    :return: a decorator that wraps the target function.
    """
    def decorator(func):
        def wrapper(*args, **kwargs):
            if not ENABLE_METRIC:
                return func(*args, **kwargs)
            publish_metric = {
                "metric_name": metric_name,
                "instance_name": "seafevents",
                "node_name": NODE_NAME,
                "details": {
                    "collected_at": datetime.datetime.now().isoformat()
                }
            }
            start_time = time.time()
            # Capture and return the wrapped function's result — the previous
            # version silently discarded it on the metrics-enabled path.
            result = func(*args, **kwargs)
            duration_seconds = time.time() - start_time
            publish_metric['metric_value'] = round(duration_seconds, 3)
            # NOTE(review): if func raises, no metric is published — matches
            # the original behavior; confirm that is intended.
            redis_cache.publish(METRIC_CHANNEL_NAME, json.dumps(publish_metric))
            return result
        return wrapper
    return decorator


def format_metrics(cache):
Expand Down
17 changes: 7 additions & 10 deletions seasearch/index_task/filename_index_updater.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,7 @@
from seafevents.seasearch.utils.seasearch_api import SeaSearchAPI
from seafevents.repo_data import repo_data
from seafevents.utils import parse_bool, get_opt_from_conf_or_env, parse_interval
from seafevents.events.metrics import seasearch_index_timing_decorator
from seafevents.app.event_redis import RedisClient
from seafevents.events.metrics import handle_metric_decorator


logger = logging.getLogger(__name__)
Expand All @@ -28,7 +27,6 @@ def __init__(self, config):
self._repo_filename_index = None
self._index_manager = None
self._parse_config(config)
self.redis_client = RedisClient(config)

def _parse_config(self, config):
"""Parse filename index update related parts of events.conf"""
Expand Down Expand Up @@ -90,8 +88,7 @@ def start(self):
self._repo_filename_index,
self._index_manager,
self._repo_data,
self._interval,
self.redis_client
self._interval
).start()


Expand All @@ -109,8 +106,9 @@ def clear_deleted_repo(repo_status_filename_index, repo_filename_index, index_ma
logger.info('Repo %s has been deleted from filename index.' % repo_id)
logger.info("filename index deleted repo has been cleared")

@seasearch_index_timing_decorator
def update_repo_file_name_indexes(repo_status_filename_index, repo_filename_index, index_manager, repo_data, redis_client):

@handle_metric_decorator('seasearch_index_timing')
def update_repo_file_name_indexes(repo_status_filename_index, repo_filename_index, index_manager, repo_data):
start, count = 0, 1000
all_repos = []

Expand Down Expand Up @@ -143,21 +141,20 @@ def update_repo_file_name_indexes(repo_status_filename_index, repo_filename_inde


class RepoFilenameIndexUpdaterTimer(Thread):
def __init__(self, repo_status_filename_index, repo_filename_index, index_manager, repo_data, interval, redis_client):
def __init__(self, repo_status_filename_index, repo_filename_index, index_manager, repo_data, interval):
super(RepoFilenameIndexUpdaterTimer, self).__init__()
self.repo_status_filename_index = repo_status_filename_index
self.repo_filename_index = repo_filename_index
self.index_manager = index_manager
self.repo_data = repo_data
self.interval = interval
self.redis_client = redis_client

def run(self):
sched = GeventScheduler()
logging.info('Start to update filename index...')
try:
sched.add_job(update_repo_file_name_indexes, IntervalTrigger(seconds=self.interval),
args=(self.repo_status_filename_index, self.repo_filename_index, self.index_manager, self.repo_data, self.redis_client))
args=(self.repo_status_filename_index, self.repo_filename_index, self.index_manager, self.repo_data))
except Exception as e:
logging.exception('periodical update filename index error: %s', e)

Expand Down

0 comments on commit 42b04be

Please sign in to comment.