Set exit code in op benchmark ci, test=document_fix (#29045)

musl/disable_test_yolov3_temporarily
LoveAn 5 years ago committed by GitHub
parent 40f5453725
commit fef0a81c1e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -55,6 +55,7 @@ def load_benchmark_result_from_logs_dir(logs_dir):
def compare_benchmark_result(develop_result, pr_result):
"""Compare the differences between develop and pr.
"""
status = True
develop_speed = develop_result.get("speed")
pr_speed = pr_result.get("speed")
@ -71,6 +72,9 @@ def compare_benchmark_result(develop_result, pr_result):
total_time_diff = (
pr_total_time - develop_total_time) / develop_total_time
if gpu_time_diff > 0.05:
status = False
# TODO(Avin0323): Print all info for making rule of alert.
logging.info("------ OP: %s ------" % pr_result.get("name"))
logging.info("GPU time change: %.5f%% (develop: %.7f -> PR: %.7f)" %
@ -85,7 +89,7 @@ def compare_benchmark_result(develop_result, pr_result):
# TODO(Avin0323): Accuracy need to add.
pass
return True
return status
if __name__ == "__main__":
@ -93,7 +97,7 @@ if __name__ == "__main__":
"""
logging.basicConfig(
level=logging.INFO,
format="[%(pathname)s:%(lineno)d] [%(levelname)s] %(message)s")
format="[%(filename)s:%(lineno)d] [%(levelname)s] %(message)s")
parser = argparse.ArgumentParser()
parser.add_argument(
@ -108,6 +112,8 @@ if __name__ == "__main__":
help="Specify the benchmark result directory of PR branch.")
args = parser.parse_args()
exit_code = 0
develop_result_dict = load_benchmark_result_from_logs_dir(
args.develop_logs_dir)
@ -117,4 +123,7 @@ if __name__ == "__main__":
pr_result = parse_log_file(os.path.join(args.pr_logs_dir, log_file))
if develop_result is None or pr_result is None:
continue
compare_benchmark_result(develop_result, pr_result)
if not compare_benchmark_result(develop_result, pr_result):
exit_code = 8
exit(exit_code)

@ -161,17 +161,20 @@ function run_op_benchmark_test {
# diff benchmark result and missing op
function summary_problems {
local op_name
local op_name exit_code
python ${PADDLE_ROOT}/tools/check_op_benchmark_result.py \
--develop_logs_dir $(pwd)/logs-develop \
--pr_logs_dir $(pwd)/logs-test_pr
exit_code=$?
for op_name in ${!CHANGE_OP_MAP[@]}
do
if [ -z "${BENCHMARK_OP_MAP[$op_name]}" ]
then
exit_code=8
LOG "[WARNING] Missing test script of \"${op_name}\" in benchmark."
fi
done
[ $exit_code -ne 0 ] && exit $exit_code
}
function main {

Loading…
Cancel
Save