diff --git a/gcov_support.sh b/gcov_support.sh
new file mode 100755
index 0000000000..baff812d3a
--- /dev/null
+++ b/gcov_support.sh
@@ -0,0 +1,456 @@
+#!/bin/bash
+## This script is to enable the gcov support of the SONiC source codes
+work_dir=$(pwd)
+env_home=$HOME
+
+GCNO_LIST_FILE="gcno_file_list.txt"
+GCDA_DIR_LIST="gcda_dir_list.txt"
+TMP_GCDA_FILE_LIST="tmp_gcda_file_list.txt"
+GCNO_ALL_TAR_GZ="gcno.tar.gz"
+GCDA_ALL_TAR_GZ="gcda.tar.gz"
+
+# PATH for all modules
+SWSS_PATH=${work_dir}/src/sonic-swss/
+SWSS_COMMON_PATH=${work_dir}/src/sonic-swss-common/
+
+INFO_DIR=info
+HTML_DIR=html
+ALLMERGE_DIR=AllMergeReport
+
+GCOV_OUTPUT=${work_dir}/gcov_output
+GCOV_INFO_OUTPUT=${GCOV_OUTPUT}/${INFO_DIR}
+GCOV_HTML_OUTPUT=${GCOV_OUTPUT}/${HTML_DIR}
+GCOV_MERGE_REPORT_OUTPUT=${GCOV_OUTPUT}/${ALLMERGE_DIR}
+
+HTML_FILE_PREFIX="GCOVHTML_"
+HTML_FILE_LIST=${GCOV_OUTPUT}/htmllist
+INFO_FILE_PREFIX="GCOVINFO_"
+INFO_FILE_LIST=${GCOV_OUTPUT}/infolist
+INFO_ERR_LIST=${work_dir}/info_err_list
+
+# change the env_var $HOME to a temporary one to ensure genhtml's reliability
+add_new_home_for_sonic()
+{
+    local new_home=`pwd`
+
+    export HOME=${new_home}
+    echo "new_home is $HOME"
+}
+
+# reset the env_var $HOME
+reset_home()
+{
+    export HOME=${env_home}
+    echo "reset to original home: $HOME"
+}
+
+# reset compiling environment (patterns quoted so the shell cannot expand them in cwd)
+gcov_support_clean()
+{
+    find ${work_dir} -name "${INFO_FILE_PREFIX}*" | xargs rm -rf
+    find ${work_dir} -name "${HTML_FILE_PREFIX}*" | xargs rm -rf
+    find ${work_dir} -name "*.gcno" | xargs rm -rf
+    find ${work_dir} -name "*.gcda" | xargs rm -rf
+    find ${work_dir} -name "${TMP_GCDA_FILE_LIST}" | xargs rm -rf
+}
+
+# verify whether the info file generated is valid (tolerates a missing file)
+verify_info_file()
+{
+    local file=$1
+    local path=$2
+    local FILE_OK=`grep "FN:" ${file} 2>/dev/null | wc -l`
+    if [ $FILE_OK -lt 1 ] ;then
+        echo ${path}/${file} >> ${INFO_ERR_LIST}
+        rm -f ${file}
+    fi
+}
+
+# search and save the dir where the lcov should be implemented
+list_lcov_path()
+{
+    local find_gcda_file
+    local gcda_dir=$1
+
+    TMP_FILE=${gcda_dir}/tmpgcdalist
+    echo "Start searching .gcda files..."
+    exec 4>$TMP_FILE
+    find_gcda_file=`find ${gcda_dir} -name "*.gcda"`
+    echo "${find_gcda_file}"
+    RESULT=${find_gcda_file}
+    echo "$RESULT" >&4
+    exec 4>&-
+    # save the dirnames where the .gcda files exist (sort -u: uniq needs sorted input)
+    cat ${TMP_FILE} | xargs dirname | sort -u > ${GCDA_DIR_LIST}
+    rm ${TMP_FILE}
+}
+
+# change the sf keys to absolute path
+modify_sf_keys()
+{
+    local infofile=$1
+    lcov --extract ${infofile} '*sonic/*' -o ${infofile}
+    sed -i "s|SF:/sonic|SF:$HOME|g" ${infofile}
+}
+
+# generate gcov base info and html report for specified range files
+lcov_genhtml_report()
+{
+    local gcda_file_range=$1
+    list_lcov_path ${gcda_file_range}
+
+    while read line
+    do
+        local fullpath=$line
+        local infoname=${INFO_FILE_PREFIX}${fullpath##*/}.info
+        htmldirname=${HTML_FILE_PREFIX}${fullpath##*/}
+
+        echo "###ENTERING ${fullpath}"
+        cd ${fullpath}
+        GCDA_COUNT=`find -name "*.gcda" | wc -l`
+        echo "gcda count: $GCDA_COUNT"
+        if [ $GCDA_COUNT -ge 1 ]; then
+            echo "Executing lcov -c -d . -o ${infoname}"
+            lcov -c -d . -o ${infoname} 2>/dev/null
+            if [ "$?" != "0" ]; then
+                echo "lcov fail!"
+                rm -f ${infoname}
+            fi
+            verify_info_file ${infoname} ${fullpath}
+            #modify_sf_keys_backup ${infoname}
+            modify_sf_keys ${infoname}
+        fi
+
+        # generate html report ([c|cpp] bracket class never matched .cpp; use -o)
+        local SOURCE_CODE_COUNT=`find . \( -name "*.c" -o -name "*.cpp" \) | wc -l`
+        if [ $SOURCE_CODE_COUNT -lt 1 ]; then
+            genhtml -o ${htmldirname} -t ${fullpath##*/} --no-source *.info
+        else
+            echo "Executing genhtml..."
+            genhtml -o ${htmldirname} -t ${fullpath##*/} *.info
+        fi
+    done < ${GCDA_DIR_LIST}
+
+    cd ${work_dir}
+}
+
+# generate html reports for all eligible submodules
+lcov_genhtml_all()
+{
+    local work_directory=$(pwd)
+
+    echo " === Start generating all gcov reports === "
+    lcov_genhtml_report ${work_directory}
+}
+
+lcov_merge_all()
+{
+    local project_c_source
+    local all_info_files
+
+    # check c/cpp source files
+    project_c_source=`find . \( -name "*.c" -o -name "*.cpp" \) 2>/dev/null | wc -l`
+
+    pushd $GCOV_OUTPUT
+    if [ ! -d ${GCOV_MERGE_REPORT_OUTPUT} ]; then
+        mkdir -p ${GCOV_MERGE_REPORT_OUTPUT}
+    fi
+
+    all_info_files=`find . -name "*.info"`
+    if [ ${project_c_source} -lt 1 ]; then
+        echo "############# build reports without sources ###############"
+        genhtml -o $GCOV_MERGE_REPORT_OUTPUT --no-source ${all_info_files}
+    else
+        echo "############# build reports with sources ##################"
+        genhtml -o $GCOV_MERGE_REPORT_OUTPUT ${all_info_files}
+    fi
+    popd
+}
+
+get_info_file()
+{
+    echo "### Start collecting info files generated by lcov"
+    find -name "${INFO_FILE_PREFIX}*" > ${INFO_FILE_LIST}
+
+    while read line
+    do
+        local info_file=${line}
+        local FromFullpath=${line%/*}
+        local ToFullpath
+
+        if [ ! -d ${GCOV_INFO_OUTPUT} ]; then
+            mkdir -p ${GCOV_INFO_OUTPUT}
+        fi
+        pushd ${GCOV_INFO_OUTPUT}
+        mkdir -p ${FromFullpath}
+        popd
+        ToFullpath=${GCOV_INFO_OUTPUT}/${FromFullpath#*/}
+        mv ${info_file} ${ToFullpath}
+    done < ${INFO_FILE_LIST}
+}
+
+get_html_file()
+{
+    echo "### Start collecting html files generated by genhtml"
+    find -name "${HTML_FILE_PREFIX}*" > ${HTML_FILE_LIST}
+
+    while read line
+    do
+        local html_report=${line}
+        local FromFullpath=${line%/*}
+        local ToFullpath
+
+        if [ ! -d ${GCOV_HTML_OUTPUT} ]; then
+            mkdir -p ${GCOV_HTML_OUTPUT}
+        fi
+        pushd ${GCOV_HTML_OUTPUT}
+        mkdir -p ${FromFullpath}
+        popd
+        ToFullpath=${GCOV_HTML_OUTPUT}/${FromFullpath#*/}
+        mv ${html_report} ${ToFullpath}
+    done < ${HTML_FILE_LIST}
+}
+
+tar_gcov_output()
+{
+    local time_stamp
+
+    time_stamp=$(date "+%Y%m%d%H%M")
+    tar -czvf ${time_stamp}_SONiC_gcov_report.tar.gz ${GCOV_OUTPUT}
+}
+
+rm_vs_cont()
+{
+    local para_vs
+    local cont_id
+    local result
+
+    docker ps -a > tmp_docker.txt
+    while read LINE ; do
+        para_vs=`echo ${LINE} | awk '{print $2}' | awk -F : '{print $1}'`
+        if [[ ${para_vs} = "docker-sonic-vs" ]]; then
+            cont_id=`echo ${LINE} | awk '{print $1}'`
+            docker stop ${cont_id}
+            docker rm ${cont_id}
+            break
+        fi
+    done < tmp_docker.txt
+    rm tmp_docker.txt
+}
+
+collect_merged_report()
+{
+    get_info_file
+    get_html_file
+    lcov_merge_all
+    cp $INFO_ERR_LIST $GCOV_OUTPUT
+    cp $GCDA_DIR_LIST $GCOV_OUTPUT
+    tar_gcov_output
+}
+
+gcov_support_generate_html()
+{
+    gcov_support_clean
+
+    add_new_home_for_sonic
+    pushd $HOME
+    if [ ! -f "${GCNO_ALL_TAR_GZ}" ]; then
+        echo "### Fail! Cannot find ${GCNO_ALL_TAR_GZ}, please check."
+        return 1
+    fi
+
+    if [ ! -f "${GCDA_ALL_TAR_GZ}" ]; then
+        echo "### Fail! Cannot find ${GCDA_ALL_TAR_GZ}, please check."
+        return 1
+    fi
+
+    echo "### Extract .gcda and .gcno files..."
+    tar -zxvf $GCDA_ALL_TAR_GZ
+    tar -zxvf $GCNO_ALL_TAR_GZ
+    popd
+
+    # remove old output dir
+    rm -rf ${work_dir}/gcov_output
+    mkdir -p $GCOV_OUTPUT
+
+    submodule_name=$1
+    lcov_genhtml_all
+    if [ "$?" != "0" ]; then
+        echo "###lcov operation fail.."
+        return 1
+    fi
+
+    # collect gcov output
+    collect_merged_report
+    reset_home
+    rm_vs_cont
+    echo "### Make $1 gcovhtml completed !!"
+}
+
+# list and save the generated .gcda files
+gcov_support_collect_gcda()
+{
+    local gcda_files_count
+    #sudo su
+
+    pushd /
+    # check whether .gcda files exist
+    gcda_files_count=`sudo find -name "*\.gcda" 2>/dev/null | wc -l`
+    if [ ${gcda_files_count} -lt 1 ]; then
+        echo "### Error! no gcda files found!"
+        return 1
+    fi
+
+    #pushd /sys/kernel/debug/gcov
+    #sudo cp -r sonic/ /
+    #popd
+
+    pushd /sonic
+    sudo tar -zcvf /usr/local/bin/gcda.tar.gz *
+    popd
+
+    popd
+
+}
+
+collect_gcda_files()
+{
+    local str_exit="Exited"
+    local para_vs
+    local cont_id
+    local result
+
+    docker ps -a > tmp_docker.txt
+    while read LINE ; do
+        result=`echo ${LINE} | grep ${str_exit}`
+        if [[ ${result} = "" ]]; then
+            para_vs=`echo ${LINE} | awk '{print $2}' | awk -F : '{print $1}'`
+            if [[ ${para_vs} = "docker-sonic-vs" ]]; then
+                cont_id=`echo ${LINE} | awk '{print $1}'`
+                docker cp gcov_support.sh ${cont_id}:/
+                docker exec ${cont_id} /bin/bash -c './gcov_support.sh collect_gcda'
+                docker cp ${cont_id}:/usr/local/bin/gcda.tar.gz ./
+                break
+            fi
+
+        fi
+    done < tmp_docker.txt
+    rm tmp_docker.txt
+}
+
+# list and save the generated .gcno files
+gcov_support_collect_gcno()
+{
+    local find_command
+    local tar_command
+
+    if [ -f "$GCNO_ALL_TAR_GZ" ]; then
+        rm -rf $GCNO_ALL_TAR_GZ
+    fi
+
+    # rename .tmp*_gcno files generated
+    for tmp_gcno in `find -name ".tmp_*.gcno"`
+    do
+        new_gcno=`echo ${tmp_gcno} | sed 's/.tmp_//g'`
+        echo ${new_gcno}
+        mv ${tmp_gcno} ${new_gcno}
+    done
+
+    echo " === Start collecting .gcno files... === "
+    exec 3>$GCNO_LIST_FILE
+    find_command=`find -name "*.gcno"`
+    echo "${find_command}"
+    if [ -z "${find_command}" ]; then
+        echo "### Error! no gcno files found!"
+        return 1
+    fi
+    RESULT=${find_command}
+    echo "$RESULT" >&3
+    exec 3>&-
+
+    local filesize=`ls -l $GCNO_LIST_FILE | awk '{print $5}'`
+    # Empty gcno_file_list indicates the non-gcov compling mode
+    if [ ${filesize} -le 1 ]; then
+        echo "empty gcno_file_list.txt"
+        rm $GCNO_LIST_FILE
+    else
+        echo " === Output archive file... === "
+        tar_command="tar -T $GCNO_LIST_FILE -zcvf $GCNO_ALL_TAR_GZ"
+        echo "${tar_command}"
+        ${tar_command}
+        mv $GCNO_ALL_TAR_GZ ${work_dir}/target
+        rm $GCNO_LIST_FILE
+        echo " === Collect finished...leave $GCNO_ALL_TAR_GZ === "
+    fi
+}
+
+
+gcov_support_add_to_swss()
+{
+    pushd $SWSS_PATH
+    sed -i '/AC_SUBST(CFLAGS_COMMON)/i\CFLAGS_COMMON+=" -fprofile-arcs -ftest-coverage"' configure.ac
+    sed -i '/AC_SUBST(CFLAGS_COMMON)/a\LDFLAGS+=" -fprofile-arcs"' configure.ac
+    sed -i '/LDFLAGS+=" -fprofile-arcs"/a\AC_SUBST(LDFLAGS)' configure.ac
+    popd
+
+    pushd $SWSS_COMMON_PATH
+    sed -i '/override_dh_auto_install/i\\tdh_auto_configure -- --enable-gcov' debian/rules
+    sed -i '/dh_auto_configure -- --enable-gcov/i\override_dh_auto_configure:' debian/rules
+    sed -i '/override_dh_auto_install/i\\r' debian/rules
+    popd
+}
+
+gcov_support_init_all()
+{
+    gcov_support_add_to_swss
+}
+
+# get and apply gcov compiling option
+gcov_support_init()
+{
+    echo " === Start init gcov support to ==="
+    gcov_support_init_all
+    if [ "$?" != "0" ]; then
+        echo "### lcov operation fail.."
+        return 1
+    fi
+}
+
+main()
+{
+    case $1 in
+        init)
+            gcov_support_init
+            ;;
+        collect)
+            gcov_support_collect_gcno
+            ;;
+        collect_gcda)
+            gcov_support_collect_gcda
+            ;;
+        collect_gcda_files)
+            collect_gcda_files
+            ;;
+        generate)
+            gcov_support_generate_html
+            ;;
+        clean)
+            gcov_support_clean
+            ;;
+        tar_output)
+            tar_gcov_output
+            ;;
+        *)
+            echo "Usage:"
+            echo " init                initialize gcov compiling environment"
+            echo " collect             collect .gcno files"
+            echo " collect_gcda        collect .gcda files"
+            echo " generate            generate gcov report in html form (all or submodule_name)"
+            echo " clean               reset environment"
+            echo " tar_output          tar gcov_output folder"
+    esac
+}
+
+main "$1"
+exit
+
diff --git a/jenkins/vs/buildimage-vs-all-pr/Jenkinsfile b/jenkins/vs/buildimage-vs-all-pr/Jenkinsfile
index f08b0f7829..005d17135b 100644
--- a/jenkins/vs/buildimage-vs-all-pr/Jenkinsfile
+++ b/jenkins/vs/buildimage-vs-all-pr/Jenkinsfile
@@ -33,8 +33,10 @@ echo ${JOB_NAME##*/}.${BUILD_NUMBER}
 CACHE_OPTIONS="SONIC_DPKG_CACHE_METHOD=rcache SONIC_DPKG_CACHE_SOURCE=/nfs/dpkg_cache/vs"
 
 make configure PLATFORM=vs
+./gcov_support.sh init all
 make SONIC_CONFIG_BUILD_JOBS=1 $CACHE_OPTIONS target/docker-sonic-vs.gz
 sudo cp target/docker-sonic-vs.gz /nfs/jenkins/docker-sonic-vs.${JOB_NAME##*/}.${BUILD_NUMBER}.gz
+./gcov_support.sh collect
 '''
     }
   }
diff --git a/jenkins/vs/buildimage-vs-all/Jenkinsfile b/jenkins/vs/buildimage-vs-all/Jenkinsfile
index 4eee6db0c0..a73ae5c4fe 100644
--- a/jenkins/vs/buildimage-vs-all/Jenkinsfile
+++ b/jenkins/vs/buildimage-vs-all/Jenkinsfile
@@ -40,8 +40,10 @@ git submodule foreach --recursive '[ -f .git ] && echo "gitdir: $(realpath --rel
 CACHE_OPTIONS="SONIC_DPKG_CACHE_METHOD=wcache SONIC_DPKG_CACHE_SOURCE=/nfs/dpkg_cache/vs"
 
 make configure PLATFORM=vs
+./gcov_support.sh init all
 make SONIC_CONFIG_BUILD_JOBS=1 $CACHE_OPTIONS target/docker-sonic-vs.gz
 sudo cp target/docker-sonic-vs.gz /nfs/jenkins/docker-sonic-vs.${JOB_NAME##*/}.${BUILD_NUMBER}.gz
+./gcov_support.sh collect
 '''
     }
 }
diff --git a/jenkins/vs/sonic-swss-build-pr/Jenkinsfile b/jenkins/vs/sonic-swss-build-pr/Jenkinsfile
index 2127859f29..a0e74e3583 100644
--- a/jenkins/vs/sonic-swss-build-pr/Jenkinsfile
+++ b/jenkins/vs/sonic-swss-build-pr/Jenkinsfile
@@ -12,7 +12,7 @@ pipeline {
       }
       copyArtifacts(projectName: 'vs/sonic-sairedis-build', filter: '**/*.deb', target: 'sairedis', flatten: true)
       copyArtifacts(projectName: 'common/sonic-swss-common-build', filter: '**/*.deb', target: 'common', flatten: true)
-      copyArtifacts(projectName: 'vs/buildimage-vs-all', filter: '**/*.deb,**/docker-sonic-vs.gz', target: 'buildimage', flatten: false)
+      copyArtifacts(projectName: 'vs/buildimage-vs-all', filter: '**/*.deb,**/docker-sonic-vs.gz,**/gcno.tar.gz', target: 'buildimage', flatten: false)
     }
   }
 
@@ -40,6 +40,17 @@
     }
     post {
+      always {
+        publishHTML(target: [
+          allowMissing: false,
+          alwaysLinkToLastBuild: false,
+          keepAll: true,
+          reportDir: 'gcov_output/AllMergeReport',
+          reportFiles: 'index.html',
+          reportName: 'Gcov Report'
+        ])
+      }
+
      cleanup {
        archiveArtifacts(artifacts: 'buildimage/target/docker-sonic-vs.gz, swss/tests/log/**, swss_*.deb, swss-dbg_*.deb, swss/tests/*.py')
        junit(allowEmptyResults: true, keepLongStdio: true, testResults: 'swss/tests/tr.xml')
diff --git a/jenkins/vs/sonic-swss-build/Jenkinsfile b/jenkins/vs/sonic-swss-build/Jenkinsfile
index 152c9252df..3d19967172 100644
--- a/jenkins/vs/sonic-swss-build/Jenkinsfile
+++ b/jenkins/vs/sonic-swss-build/Jenkinsfile
@@ -19,7 +19,7 @@ pipeline {
       }
      copyArtifacts(projectName: 'vs/sonic-sairedis-build', filter: '**/*.deb', target: 'sairedis', flatten: true)
      copyArtifacts(projectName: 'common/sonic-swss-common-build', filter: '**/*.deb', target: 'common', flatten: true)
-      copyArtifacts(projectName: 'vs/buildimage-vs-all', filter: '**/*.deb,**/docker-sonic-vs.gz', target: 'buildimage', flatten: false)
+      copyArtifacts(projectName: 'vs/buildimage-vs-all', filter: '**/*.deb,**/docker-sonic-vs.gz,**/gcno.tar.gz', target: 'buildimage', flatten: false)
     }
   }
 
@@ -52,6 +52,14 @@
       archiveArtifacts(artifacts: 'buildimage/target/docker-sonic-vs.gz, swss/tests/log/**, swss_*.deb, swss-dbg_*.deb, swss/tests/*.py')
       junit(allowEmptyResults: true, keepLongStdio: true, testResults: 'swss/tests/tr.xml')
+      publishHTML(target: [
+        allowMissing: false,
+        alwaysLinkToLastBuild: false,
+        keepAll: true,
+        reportDir: 'gcov_output/AllMergeReport',
+        reportFiles: 'index.html',
+        reportName: 'Gcov Report'
+      ])
       cleanWs(disableDeferredWipeout: false, deleteDirs: true, notFailBuild: true)
     }
     fixed {
       slackSend(color:'#00FF00', message: "Build job back to normal: ${env.JOB_NAME} ${env.BUILD_NUMBER} (<${env.BUILD_URL}|Open>)")
diff --git a/scripts/vs/sonic-swss-build/test.sh b/scripts/vs/sonic-swss-build/test.sh
index f31d5ce983..1adb9ef278 100755
--- a/scripts/vs/sonic-swss-build/test.sh
+++ b/scripts/vs/sonic-swss-build/test.sh
@@ -7,5 +7,11 @@ cleanup() {
 trap cleanup ERR
 
 pushd swss/tests
-sudo py.test -v --force-flaky --junitxml=tr.xml --imgname=docker-sonic-vs:${JOB_NAME##*/}.${BUILD_NUMBER}
+#sudo py.test -v --force-flaky --junitxml=tr.xml --imgname=docker-sonic-vs:${JOB_NAME##*/}.${BUILD_NUMBER}
+sudo py.test -v --force-flaky --junitxml=tr.xml --keeptb --imgname=docker-sonic-vs:${JOB_NAME##*/}.${BUILD_NUMBER}
+popd
+./gcov_support.sh collect_gcda_files
+sudo cp buildimage/gcno.tar.gz .
+./gcov_support.sh generate all
+
 cleanup