diff --git a/test-case/check-pause-release.sh b/test-case/check-pause-release.sh
new file mode 100755
index 00000000..848501f4
--- /dev/null
+++ b/test-case/check-pause-release.sh
@@ -0,0 +1,133 @@
+#!/bin/bash
+
+set -e
+
+##
+## Case Name: check-pause-release
+## Preconditions:
+##    N/A
+## Description:
+##    playback/capture on each pipeline and trigger pause/release with expect
+##    expect sleeps for a random time, then mocks a spacebar keypress ' ' to
+##    trigger the release action
+## Case step:
+##    1. aplay/arecord on PCM
+##    2. use expect to trigger pause/release
+## Expect result:
+##    no errors occur for aplay/arecord
+##
+
+# shellcheck source=case-lib/lib.sh
+source "$(dirname "${BASH_SOURCE[0]}")"/../case-lib/lib.sh
+
+OPT_NAME['t']='tplg'            OPT_DESC['t']='tplg file, default value is env TPLG: $''TPLG'
+OPT_HAS_ARG['t']=1              OPT_VAL['t']="$TPLG"
+
+OPT_NAME['m']='mode'            OPT_DESC['m']='test mode'
+OPT_HAS_ARG['m']=1              OPT_VAL['m']='playback'
+
+OPT_NAME['c']='count'           OPT_DESC['c']='pause/release repeat count'
+OPT_HAS_ARG['c']=1              OPT_VAL['c']=10
+
+OPT_NAME['f']='file'            OPT_DESC['f']='file name'
+OPT_HAS_ARG['f']=1              OPT_VAL['f']=''
+
+OPT_NAME['i']='min'             OPT_DESC['i']='random range min value, unit is ms'
+OPT_HAS_ARG['i']=1              OPT_VAL['i']='100'
+
+OPT_NAME['a']='max'             OPT_DESC['a']='random range max value, unit is ms'
+OPT_HAS_ARG['a']=1              OPT_VAL['a']='200'
+
+OPT_NAME['s']='sof-logger'      OPT_DESC['s']="Open sof-logger trace the data will store at $LOG_ROOT"
+OPT_HAS_ARG['s']=0              OPT_VAL['s']=1
+
+OPT_NAME['S']='filter_string'   OPT_DESC['S']="run this case on specified pipelines"
+OPT_HAS_ARG['S']=1              OPT_VAL['S']="id:any"
+
+func_opt_parse_option "$@"
+setup_kernel_check_point
+
+tplg=${OPT_VAL['t']}
+test_mode=${OPT_VAL['m']}
+repeat_count=${OPT_VAL['c']}
+#TODO: file name salt for capture
+file_name=${OPT_VAL['f']}
+# configure random value range
+rnd_min=${OPT_VAL['i']}
+rnd_max=${OPT_VAL['a']}
+rnd_range=$(( rnd_max - rnd_min ))
+[[ $rnd_range -le 0 ]] && dlogw "Invalid random range [ min:$rnd_min - max:$rnd_max ]" && exit 2
+
+case $test_mode in
+    "playback")
+        cmd=aplay
+        cmd_opts="$SOF_APLAY_OPTS"
+        dummy_file=/dev/zero
+    ;;
+    "capture")
+        cmd=arecord
+        cmd_opts="$SOF_ARECORD_OPTS"
+        dummy_file=/dev/null
+    ;;
+    *)
+        die "Invalid test mode: $test_mode (allowed values: playback, capture)"
+    ;;
+esac
+
+logger_disabled || func_lib_start_log_collect
+
+[[ -z $file_name ]] && file_name=$dummy_file
+
+func_pipeline_export "$tplg" "type:$test_mode & ${OPT_VAL['S']}"
+for idx in $(seq 0 $((PIPELINE_COUNT - 1)))
+do
+    # set up checkpoint for each iteration
+    setup_kernel_check_point
+    channel=$(func_pipeline_parse_value "$idx" channel)
+    rate=$(func_pipeline_parse_value "$idx" rate)
+    fmt=$(func_pipeline_parse_value "$idx" fmt)
+    dev=$(func_pipeline_parse_value "$idx" dev)
+    snd=$(func_pipeline_parse_value "$idx" snd)
+
+    # expect is tcl language script
+    # expr rand(): produces random numbers between 0 and 1
+    # after ms: Ms must be an integer giving a time in milliseconds.
+    # The command sleeps for ms milliseconds and then returns.
+    dlogi "Entering expect script with:
+    $cmd $SOF_ALSA_OPTS $cmd_opts -D $dev -r $rate -c $channel -f $fmt -vv -i $file_name -q"
+
+    expect < $repeat_count } { exit 0 }
+        exp_continue
+    }
+}
+exit 1
+END
+    ret=$?
+    # flush the output
+    echo
+    if [ $ret -ne 0 ]; then
+        func_lib_lsof_error_dump "$snd"
+        sof-process-kill.sh ||
+            dlogw "Failed to kill the test process"
+        exit $ret
+    fi
+    # check kernel log for each iteration to catch issues
+    sof-kernel-log-check.sh "$KERNEL_CHECKPOINT" || die "Caught error in kernel log"
+done
diff --git a/test-case/check-pause-resume.sh b/test-case/check-pause-resume.sh
deleted file mode 100755
index 170aff75..00000000
--- a/test-case/check-pause-resume.sh
+++ /dev/null
@@ -1,133 +0,0 @@
-#!/bin/bash
-
-set -e
-
-##
-## Case Name: check-pause-resume
-## Preconditions:
-##    N/A
-## Description:
-##    playback/capture on each pipeline and feak pause/resume with expect
-##    expect sleep for sleep time then mocks spacebar keypresses ' ' to
-##    cause resume action
-## Case step:
-##    1. aplay/arecord on PCM
-##    2. use expect to fake pause/resume
-## Expect result:
-##    no error happen for aplay/arecord
-##
-
-# shellcheck source=case-lib/lib.sh
-source "$(dirname "${BASH_SOURCE[0]}")"/../case-lib/lib.sh
-
-OPT_NAME['t']='tplg'            OPT_DESC['t']='tplg file, default value is env TPLG: $''TPLG'
-OPT_HAS_ARG['t']=1              OPT_VAL['t']="$TPLG"
-
-OPT_NAME['m']='mode'            OPT_DESC['m']='test mode'
-OPT_HAS_ARG['m']=1              OPT_VAL['m']='playback'
-
-OPT_NAME['c']='count'           OPT_DESC['c']='pause/resume repeat count'
-OPT_HAS_ARG['c']=1              OPT_VAL['c']=10
-
-OPT_NAME['f']='file'            OPT_DESC['f']='file name'
-OPT_HAS_ARG['f']=1              OPT_VAL['f']=''
-
-OPT_NAME['i']='min'             OPT_DESC['i']='random range min value, unit is ms'
-OPT_HAS_ARG['i']=1              OPT_VAL['i']='100'
-
-OPT_NAME['a']='max'             OPT_DESC['a']='random range max value, unit is ms'
-OPT_HAS_ARG['a']=1              OPT_VAL['a']='200'
-
-OPT_NAME['s']='sof-logger'      OPT_DESC['s']="Open sof-logger trace the data will store at $LOG_ROOT"
-OPT_HAS_ARG['s']=0              OPT_VAL['s']=1
-
-OPT_NAME['S']='filter_string'   OPT_DESC['S']="run this case on specified pipelines"
-OPT_HAS_ARG['S']=1              OPT_VAL['S']="id:any"
-
-func_opt_parse_option "$@"
-setup_kernel_check_point
-
-tplg=${OPT_VAL['t']}
-test_mode=${OPT_VAL['m']}
-repeat_count=${OPT_VAL['c']}
-#TODO: file name salt for capture
-file_name=${OPT_VAL['f']}
-# configure random value range
-rnd_min=${OPT_VAL['i']}
-rnd_max=${OPT_VAL['a']}
-rnd_range=$(( rnd_max - rnd_min ))
-[[ $rnd_range -le 0 ]] && dlogw "Error random range scope [ min:$rnd_min - max:$rnd_max ]" && exit 2
-
-case $test_mode in
-    "playback")
-        cmd=aplay
-        cmd_opts="$SOF_APLAY_OPTS"
-        dummy_file=/dev/zero
-    ;;
-    "capture")
-        cmd=arecord
-        cmd_opts="$SOF_ARECORD_OPTS"
-        dummy_file=/dev/null
-    ;;
-    *)
-        die "Invalid test mode: $test_mode (allow value : playback, capture)"
-    ;;
-esac
-
-logger_disabled || func_lib_start_log_collect
-
-[[ -z $file_name ]] && file_name=$dummy_file
-
-func_pipeline_export "$tplg" "type:$test_mode & ${OPT_VAL['S']}"
-for idx in $(seq 0 $((PIPELINE_COUNT - 1)))
-do
-    # set up checkpoint for each iteration
-    setup_kernel_check_point
-    channel=$(func_pipeline_parse_value "$idx" channel)
-    rate=$(func_pipeline_parse_value "$idx" rate)
-    fmt=$(func_pipeline_parse_value "$idx" fmt)
-    dev=$(func_pipeline_parse_value "$idx" dev)
-    snd=$(func_pipeline_parse_value "$idx" snd)
-
-    # expect is tcl language script
-    # expr rand(): produces random numbers between 0 and 1
-    # after ms: Ms must be an integer giving a time in milliseconds.
-    # The command sleeps for ms milliseconds and then returns.
- dlogi "Entering expect script with: - $cmd $SOF_ALSA_OPTS $cmd_opts -D $dev -r $rate -c $channel -f $fmt -vv -i $file_name -q" - - expect < $repeat_count } { exit 0 } - exp_continue - } -} -exit 1 -END - ret=$? - #flush the output - echo - if [ $ret -ne 0 ]; then - func_lib_lsof_error_dump "$snd" - sof-process-kill.sh || - dlogw "Kill process catch error" - exit $ret - fi - # check kernel log for each iteration to catch issues - sof-kernel-log-check.sh "$KERNEL_CHECKPOINT" || die "Caught error in kernel log" -done diff --git a/test-case/check-pause-resume.sh b/test-case/check-pause-resume.sh new file mode 120000 index 00000000..686bf7bf --- /dev/null +++ b/test-case/check-pause-resume.sh @@ -0,0 +1 @@ +check-pause-release.sh \ No newline at end of file diff --git a/test-case/multiple-pause-release.sh b/test-case/multiple-pause-release.sh new file mode 100755 index 00000000..a15ed8f9 --- /dev/null +++ b/test-case/multiple-pause-release.sh @@ -0,0 +1,198 @@ +#!/bin/bash + +set -e + +## +## Case Name: Run multiple pipeline for pause release +## Preconditions: +## N/A +## Description: +## pickup multiple pipline to do pause release +## trigger pause/release with expect +## expect sleep for sleep time then mocks spacebar keypresses ' ' to +## cause release action +## Case step: +## 1. run 1st pipeline +## 2. pickup any other pipeline +## 3. use expect to trigger pause/release in each pipeline +## 4. go through with tplg file +## Expect result: +## no errors occur for either process +## + +# shellcheck source=case-lib/lib.sh +source "$(dirname "${BASH_SOURCE[0]}")"/../case-lib/lib.sh + +OPT_NAME['t']='tplg' OPT_DESC['t']="tplg file, default value is env TPLG: $TPLG" +OPT_HAS_ARG['t']=1 OPT_VAL['t']="$TPLG" + +OPT_NAME['l']='loop' OPT_DESC['l']='loop count' +OPT_HAS_ARG['l']=1 OPT_VAL['l']=5 + +OPT_NAME['c']='count' OPT_DESC['c']='combine test pipeline count' +OPT_HAS_ARG['c']=1 OPT_VAL['c']=2 + +OPT_NAME['r']='repeat' OPT_DESC['r']='pause release repeat count' +OPT_HAS_ARG['r']=1 OPT_VAL['r']=5 + +# pause/release interval will be a random value bounded by the min and max values below +OPT_NAME['i']='min' OPT_DESC['i']='pause/release transition min value, unit is ms' +OPT_HAS_ARG['i']=1 OPT_VAL['i']='20' + +OPT_NAME['a']='max' OPT_DESC['a']='pause/release transition max value, unit is ms' +OPT_HAS_ARG['a']=1 OPT_VAL['a']='50' + +OPT_NAME['s']='sof-logger' OPT_DESC['s']="Open sof-logger trace the data will store at $LOG_ROOT" +OPT_HAS_ARG['s']=0 OPT_VAL['s']=1 + +func_opt_parse_option "$@" + +repeat_count=${OPT_VAL['r']} +loop_count=${OPT_VAL['l']} +# configure random value range +rnd_min=${OPT_VAL['i']} +rnd_max=${OPT_VAL['a']} +rnd_range=$((rnd_max - rnd_min)) +[[ $rnd_range -le 0 ]] && dlogw "Error random range scope [ min:$rnd_min - max:$rnd_max ]" && exit 2 + +tplg=${OPT_VAL['t']} +func_pipeline_export "$tplg" "type:any & ~pcm:Amplifier Reference" + +logger_disabled || func_lib_start_log_collect + +declare -a pipeline_idx_lst +declare -a cmd_idx_lst +declare -a file_idx_lst + +# merge all pipeline to the 1 group +for i in $(seq 0 $((PIPELINE_COUNT - 1))) +do + pipeline_idx_lst=("${pipeline_idx_lst[@]}" "$i") + type=$(func_pipeline_parse_value "$i" type) + if [ "$type" == "playback" ];then + cmd_idx_lst=("${cmd_idx_lst[@]}" "aplay") + file_idx_lst=("${file_idx_lst[@]}" "/dev/zero") + elif [ "$type" == "capture" ];then + cmd_idx_lst=("${cmd_idx_lst[@]}" "arecord") + file_idx_lst=("${file_idx_lst[@]}" "/dev/null") + elif [ "$type" == "both" ];then + cmd_idx_lst=("${cmd_idx_lst[@]}" "aplay") + 
+        file_idx_lst=("${file_idx_lst[@]}" "/dev/zero")
+        # 'both' includes playback & capture, so duplicate the entry
+        pipeline_idx_lst=("${pipeline_idx_lst[@]}" "$i")
+        cmd_idx_lst=("${cmd_idx_lst[@]}" "arecord")
+        file_idx_lst=("${file_idx_lst[@]}" "/dev/null")
+    else
+        die "Unknown pipeline type: $type"
+    fi
+done
+
+# use the smaller of the TPLG pipeline count and the case's pipeline count option
+[[ ${#pipeline_idx_lst[*]} -gt ${OPT_VAL['c']} ]] && max_count=${OPT_VAL['c']} || max_count=${#pipeline_idx_lst[*]}
+[[ $max_count -eq 1 ]] && dlogw "pipeline count is 1, no need to run this case" && exit 2
+
+# create combination list
+declare -a pipeline_combine_lst
+for i in $(sof-combinatoric.py -n ${#pipeline_idx_lst[*]} -p "$max_count")
+do
+    # convert the combination string into elements by replacing commas with spaces for the for loop below
+    pipeline_combine_str="${i//,/ }"
+    pipeline_combine_lst=("${pipeline_combine_lst[@]}" "$pipeline_combine_str")
+done
+[[ ${#pipeline_combine_lst[@]} -eq 0 ]] && dlogw "pipeline combination list is empty" && exit 2
+
+func_pause_release_pipeline()
+{
+    local idx=${pipeline_idx_lst[$1]} cmd=${cmd_idx_lst[$1]} file=${file_idx_lst[$1]}
+    local channel; channel=$(func_pipeline_parse_value "$idx" channel)
+    local rate; rate=$(func_pipeline_parse_value "$idx" rate)
+    local fmt; fmt=$(func_pipeline_parse_value "$idx" fmt)
+    local dev; dev=$(func_pipeline_parse_value "$idx" dev)
+    local pcm; pcm=$(func_pipeline_parse_value "$idx" pcm)
+    local type; type=$(func_pipeline_parse_value "$idx" type)
+    # expect is tcl language script
+    # expr rand(): produces random numbers between 0 and 1
+    # after ms: Ms must be an integer giving a time in milliseconds.
+    # The command sleeps for ms milliseconds and then returns.
+    dlogi "$pcm to command: $cmd -D $dev -r $rate -c $channel -f $fmt -vv -i $file -q"
+    expect < $repeat_count } { exit 0 }
+        exp_continue
+    }
+}
+exit 1
+END
+}
+
+# to prevent an infinite loop; 5 seconds per repeat is plenty
+max_wait_time=$((5 * repeat_count))
+
+for i in $(seq 1 "$loop_count")
+do
+    dlogi "===== Loop count( $i / $loop_count ) ====="
+    # set up checkpoint for each iteration
+    setup_kernel_check_point
+    for pipeline_combine_str in "${pipeline_combine_lst[@]}"
+    do
+        unset pid_lst
+        declare -a pid_lst
+        for idx in $pipeline_combine_str
+        do
+            func_pause_release_pipeline "$idx"
+            pid_lst=("${pid_lst[@]}" $!)
+        done
+        # wait for the expect scripts to finish
+        dlogi "waiting for expect processes to finish"
+        iwait=$max_wait_time
+        while [ $iwait -gt 0 ]
+        do
+            iwait=$((iwait - 1))
+            sleep 1s
"$(pidof expect)" ]] && break + done + # fix aplay/arecord last output + echo + if [ "$(pidof expect)" ]; then + dloge "Still have expect process not finished after wait for $max_wait_time" + # list aplay/arecord processes + pgrep -a -f aplay || true + pgrep -a -f arecord || true + exit 1 + fi + # now check for all expect quit status + # dump the pipeline combine, because pause release will have too many operation log + for idx in $pipeline_combine_str + do + pipeline_index=${pipeline_idx_lst[$idx]} + pcm=$(func_pipeline_parse_value "$pipeline_index" pcm) + dlogi "pipeline: $pcm with ${cmd_idx_lst[$idx]}" + done + dlogi "Check expect exit status" + for pid in "${pid_lst[@]}" + do + wait "$pid" || { + sof-kernel-log-check.sh "$KERNEL_CHECKPOINT" || true + die "pause release PID $pid had non-zero exit status" + } + done + done + # check kernel log for each iteration to catch issues + sof-kernel-log-check.sh "$KERNEL_CHECKPOINT" || die "Caught error in kernel log" +done + diff --git a/test-case/multiple-pause-resume.sh b/test-case/multiple-pause-resume.sh deleted file mode 100755 index 598990b7..00000000 --- a/test-case/multiple-pause-resume.sh +++ /dev/null @@ -1,198 +0,0 @@ -#!/bin/bash - -set -e - -## -## Case Name: Run multiple pipeline for pause resume -## Preconditions: -## N/A -## Description: -## pickup multiple pipline to do pause resume -## fake pause/resume with expect -## expect sleep for sleep time then mocks spacebar keypresses ' ' to -## cause resume action -## Case step: -## 1. run 1st pipeline -## 2. pickup any other pipeline -## 3. use expect to fake pause/resume in each pipeline -## 4. go through with tplg file -## Expect result: -## no errors occur for either process -## - -# shellcheck source=case-lib/lib.sh -source "$(dirname "${BASH_SOURCE[0]}")"/../case-lib/lib.sh - -OPT_NAME['t']='tplg' OPT_DESC['t']="tplg file, default value is env TPLG: $TPLG" -OPT_HAS_ARG['t']=1 OPT_VAL['t']="$TPLG" - -OPT_NAME['l']='loop' OPT_DESC['l']='loop count' -OPT_HAS_ARG['l']=1 OPT_VAL['l']=5 - -OPT_NAME['c']='count' OPT_DESC['c']='combine test pipeline count' -OPT_HAS_ARG['c']=1 OPT_VAL['c']=2 - -OPT_NAME['r']='repeat' OPT_DESC['r']='pause resume repeat count' -OPT_HAS_ARG['r']=1 OPT_VAL['r']=5 - -# pause/resume interval will be a random value bounded by the min and max values below -OPT_NAME['i']='min' OPT_DESC['i']='pause/resume transition min value, unit is ms' -OPT_HAS_ARG['i']=1 OPT_VAL['i']='20' - -OPT_NAME['a']='max' OPT_DESC['a']='pause/resume transition max value, unit is ms' -OPT_HAS_ARG['a']=1 OPT_VAL['a']='50' - -OPT_NAME['s']='sof-logger' OPT_DESC['s']="Open sof-logger trace the data will store at $LOG_ROOT" -OPT_HAS_ARG['s']=0 OPT_VAL['s']=1 - -func_opt_parse_option "$@" - -repeat_count=${OPT_VAL['r']} -loop_count=${OPT_VAL['l']} -# configure random value range -rnd_min=${OPT_VAL['i']} -rnd_max=${OPT_VAL['a']} -rnd_range=$((rnd_max - rnd_min)) -[[ $rnd_range -le 0 ]] && dlogw "Error random range scope [ min:$rnd_min - max:$rnd_max ]" && exit 2 - -tplg=${OPT_VAL['t']} -func_pipeline_export "$tplg" "type:any & ~pcm:Amplifier Reference" - -logger_disabled || func_lib_start_log_collect - -declare -a pipeline_idx_lst -declare -a cmd_idx_lst -declare -a file_idx_lst - -# merge all pipeline to the 1 group -for i in $(seq 0 $((PIPELINE_COUNT - 1))) -do - pipeline_idx_lst=("${pipeline_idx_lst[@]}" "$i") - type=$(func_pipeline_parse_value "$i" type) - if [ "$type" == "playback" ];then - cmd_idx_lst=("${cmd_idx_lst[@]}" "aplay") - file_idx_lst=("${file_idx_lst[@]}" "/dev/zero") - elif [ 
"$type" == "capture" ];then - cmd_idx_lst=("${cmd_idx_lst[@]}" "arecord") - file_idx_lst=("${file_idx_lst[@]}" "/dev/null") - elif [ "$type" == "both" ];then - cmd_idx_lst=("${cmd_idx_lst[@]}" "aplay") - file_idx_lst=("${file_idx_lst[@]}" "/dev/zero") - # both include playback & capture, so duplicate it - pipeline_idx_lst=("${pipeline_idx_lst[@]}" "$i") - cmd_idx_lst=("${cmd_idx_lst[@]}" "arecord") - file_idx_lst=("${file_idx_lst[@]}" "/dev/null") - else - die "Unknow pipeline type: $type" - fi -done - -# get the min value of TPLG:'pipeline count' with Case:'pipeline count' -[[ ${#pipeline_idx_lst[*]} -gt ${OPT_VAL['c']} ]] && max_count=${OPT_VAL['c']} || max_count=${#pipeline_idx_lst[*]} -[[ $max_count -eq 1 ]] && dlogw "pipeline count is 1, don't need to run this case" && exit 2 - -# create combination list -declare -a pipeline_combine_lst -for i in $(sof-combinatoric.py -n ${#pipeline_idx_lst[*]} -p "$max_count") -do - # convert combine string to combine element by replacing commas with spaces for the for loop below - pipeline_combine_str="${i//,/ }" - pipeline_combine_lst=("${pipeline_combine_lst[@]}" "$pipeline_combine_str") -done -[[ ${#pipeline_combine_lst[@]} -eq 0 ]] && dlogw "pipeline combine is empty" && exit 2 - -func_pause_resume_pipeline() -{ - local idx=${pipeline_idx_lst[$1]} cmd=${cmd_idx_lst[$1]} file=${file_idx_lst[$1]} - local channel; channel=$(func_pipeline_parse_value "$idx" channel) - local rate; rate=$(func_pipeline_parse_value "$idx" rate) - local fmt; fmt=$(func_pipeline_parse_value "$idx" fmt) - local dev; dev=$(func_pipeline_parse_value "$idx" dev) - local pcm; pcm=$(func_pipeline_parse_value "$idx" pcm) - local type; type=$(func_pipeline_parse_value "$idx" type) - # expect is tcl language script - # expr rand(): produces random numbers between 0 and 1 - # after ms: Ms must be an integer giving a time in milliseconds. - # The command sleeps for ms milliseconds and then returns. - dlogi "$pcm to command: $cmd -D $dev -r $rate -c $channel -f $fmt -vv -i $file -q" - expect < $repeat_count } { exit 0 } - exp_continue - } -} -exit 1 -END -} - -# to prevent infinite loop, 5 second per a repeat is plenty -max_wait_time=$((5 * repeat_count)) - -for i in $(seq 1 "$loop_count") -do - dlogi "===== Loop count( $i / $loop_count ) =====" - # set up checkpoint for each iteration - setup_kernel_check_point - for pipeline_combine_str in "${pipeline_combine_lst[@]}" - do - unset pid_lst - declare -a pid_lst - for idx in $pipeline_combine_str - do - func_pause_resume_pipeline "$idx" - pid_lst=("${pid_lst[@]}" $!) - done - # wait for expect script finished - dlogi "wait for expect process finished" - iwait=$max_wait_time - while [ $iwait -gt 0 ] - do - iwait=$((iwait - 1)) - sleep 1s - [[ ! 
"$(pidof expect)" ]] && break - done - # fix aplay/arecord last output - echo - if [ "$(pidof expect)" ]; then - dloge "Still have expect process not finished after wait for $max_wait_time" - # list aplay/arecord processes - pgrep -a -f aplay || true - pgrep -a -f arecord || true - exit 1 - fi - # now check for all expect quit status - # dump the pipeline combine, because pause resume will have too many operation log - for idx in $pipeline_combine_str - do - pipeline_index=${pipeline_idx_lst[$idx]} - pcm=$(func_pipeline_parse_value "$pipeline_index" pcm) - dlogi "pipeline: $pcm with ${cmd_idx_lst[$idx]}" - done - dlogi "Check expect exit status" - for pid in "${pid_lst[@]}" - do - wait "$pid" || { - sof-kernel-log-check.sh "$KERNEL_CHECKPOINT" || true - die "pause resume PID $pid had non-zero exit status" - } - done - done - # check kernel log for each iteration to catch issues - sof-kernel-log-check.sh "$KERNEL_CHECKPOINT" || die "Caught error in kernel log" -done - diff --git a/test-case/multiple-pause-resume.sh b/test-case/multiple-pause-resume.sh new file mode 120000 index 00000000..8b47d5e1 --- /dev/null +++ b/test-case/multiple-pause-resume.sh @@ -0,0 +1 @@ +multiple-pause-release.sh \ No newline at end of file diff --git a/test-case/run-all-tests.sh b/test-case/run-all-tests.sh index 0b3d7dba..b005b1e1 100755 --- a/test-case/run-all-tests.sh +++ b/test-case/run-all-tests.sh @@ -32,8 +32,8 @@ capture_d1l100r1 playback_d1l1r50 capture_d1l1r50 speaker -pause-resume-playback -pause-resume-capture +pause-release-playback +pause-release-capture volume signal-stop-start-playback signal-stop-start-capture @@ -42,7 +42,7 @@ xrun-injection-capture simultaneous-playback-capture multiple-pipeline-playback multiple-pipeline-capture -multiple-pause-resume +multiple-pause-release kmod-load-unload kmod-load-unload-after-playback suspend-resume @@ -227,13 +227,13 @@ test_speaker() { "$mydir"/test-speaker.sh -l "$medium_loop" } -test_pause-resume-playback() +test_pause-release-playback() { - "$mydir"/check-pause-resume.sh -c "$large_count" -m playback + "$mydir"/check-pause-release.sh -c "$large_count" -m playback } -test_pause-resume-capture() +test_pause-release-capture() { - "$mydir"/check-pause-resume.sh -c "$large_count" -m capture + "$mydir"/check-pause-release.sh -c "$large_count" -m capture } test_volume() { @@ -271,9 +271,9 @@ test_multiple-pipeline-capture() { "$mydir"/multiple-pipeline-capture.sh -l "$medium_loop" } -test_multiple-pause-resume() +test_multiple-pause-release() { - "$mydir"/multiple-pause-resume.sh -l "$small_loop" -r 25 + "$mydir"/multiple-pause-release.sh -l "$small_loop" -r 25 } test_kmod-load-unload() {