diff --git a/chainerui/tasks/crawl_result.py b/chainerui/tasks/crawl_result.py
index a92b08f1..810972f0 100644
--- a/chainerui/tasks/crawl_result.py
+++ b/chainerui/tasks/crawl_result.py
@@ -65,7 +65,7 @@ def _check_log_updated(result):
     return False
 
 
-def crawl_result(result, force=False):
+def crawl_result(result, force=False, commit=True):
     """crawl_results."""
 
     now = datetime.datetime.now()
@@ -102,6 +102,7 @@ def crawl_result(result, force=False):
         )
 
     result.updated_at = datetime.datetime.now()
-    db.session.commit()
+    if commit:
+        db.session.commit()
 
     return result
diff --git a/chainerui/views/result.py b/chainerui/views/result.py
index e680814b..c5bdccb9 100644
--- a/chainerui/views/result.py
+++ b/chainerui/views/result.py
@@ -34,14 +34,16 @@ def get(self, id=None, project_id=None):
                 filter_by(is_unregistered=False).\
                 all()
 
+            # NOTE: To improve performance, the commit phase is aggregated.
+            # With `commit=False` the implicit transaction stays open and the
+            # UPDATE is not committed per result, so serializing the results
+            # below does not have to issue SELECT queries again.
             for result in results:
-                result = crawl_result(result)
+                crawl_result(result, commit=False)
+            rs = [r.serialize_with_sampled_logs(logs_limit) for r in results]
+            db.session.commit()
 
-            return jsonify({
-                'results': [
-                    r.serialize_with_sampled_logs(logs_limit) for r in results
-                ]
-            })
+            return jsonify({'results': rs})
         else:
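
The change leans on SQLAlchemy's session behavior: `Session.commit()` expires loaded instances by default (`expire_on_commit=True`), so reading their attributes after a commit triggers a fresh SELECT per object. The sketch below is not chainerui code; it uses a made-up `Result` model and an in-memory SQLite database purely to illustrate the same pattern: mutate the objects without committing, serialize them while they are still fresh in the session, then commit once.

```python
# Minimal sketch (assumptions: plain SQLAlchemy, an in-memory SQLite DB, and a
# made-up `Result` model -- this is NOT the chainerui schema or API).
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()


class Result(Base):
    __tablename__ = 'results'
    id = Column(Integer, primary_key=True)
    name = Column(String)


engine = create_engine('sqlite://', echo=True)  # echo=True prints emitted SQL
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([Result(name='r1'), Result(name='r2')])
session.commit()

results = session.query(Result).all()

for r in results:
    r.name += '-crawled'  # stand-in for the per-result update in crawl_result
    # session.commit()    # committing here would expire every loaded instance
    #                     # (expire_on_commit=True), forcing one SELECT per row
    #                     # during serialization below

serialized = [{'id': r.id, 'name': r.name} for r in results]  # no extra SELECT
session.commit()  # single commit after serialization, as in the patch
```

With `echo=True` you can confirm the difference: the deferred-commit version emits no per-row SELECT during serialization, only the flushed UPDATEs at the final commit.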