• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
#!/bin/bash

set -x

# Setup paths and import util functions
. "$(dirname "$(readlink -f "$0")")"/util.sh

TESTS=""
BACKENDS=""
# Default parallelism: one eighth of the cores, minimum 1.
# (Arithmetic expansion replaces the old nested `expr` calls.)
NUM_THREADS=${NUM_THREADS:-$(( $(nproc) / 8 + 1 ))}

USE_HOST_GLES=0
TEST_APP="deqp"
# Parse the command-line flags, filling in the global configuration:
#   BACKENDS      - space-separated list of backends to test
#   TESTS         - space-separated list of test suites to run
#   NUM_THREADS   - worker thread count for the test runner
#   USE_HOST_GLES - 1 to use a GLES host context, 0 for GL
#   TEST_APP      - "deqp" or "piglit"
# Exits with status 1 on an unknown flag or a missing option argument.
parse_input()
{
   while [ -n "$1" ]; do
      case "$1" in

      -a|--all-backends)
         BACKENDS="vtest-softpipe vtest-llvmpipe vtest-gpu"
         BACKENDS="$BACKENDS softpipe llvmpipe gpu"
         ;;

      -v|--vtest)
         BACKENDS="vtest-softpipe vtest-llvmpipe vtest-gpu"
         ;;

      --host-gles)
         USE_HOST_GLES=1
         ;;

      --host-gl)
         USE_HOST_GLES=0
         ;;

      -b|--backend)
         if [ -z "$2" ]; then
            echo "Missing argument for $1"
            exit 1
         fi
         BACKENDS="$BACKENDS $2"
         shift
         ;;

      -j|--threads)
         if [ -z "$2" ]; then
            echo "Missing argument for $1"
            exit 1
         fi
         NUM_THREADS="$2"
         shift
         ;;

      --gles2|--gles3|--gles31|--gl30|--gl31|--gl32)
         # Strip the leading dashes to get the suite name
         TESTS="$TESTS ${1#--}"
         ;;

      -d|--deqp)
         TEST_APP="deqp"
         ;;

      -p|--piglit)
         TEST_APP="piglit"
         ;;

      *)
         echo "Unknown flag $1"
         exit 1
      esac
      shift
   done

   # Run on the real GPU when no backend was requested explicitly
   if [ -z "$BACKENDS" ]; then
      BACKENDS="gpu"
   fi
}
99
# Compare the freshly-generated results against the stored reference
# results for this backend/test combination.
# Globals read: RESULTS_DIR, PREVIOUS_RESULTS_DIR, IGNORE_TESTS_FILE,
#               TEST_APP, VIRGL_PATH
# Returns: 0 - results match the previous run
#          1 - differences found (diff left in regression_diff.txt)
#          2 - no previous results to compare against
compare_previous()
{
   if [ ! -f "$PREVIOUS_RESULTS_DIR/results.txt" ]; then
      return 2
   fi

   # Piglit tests use @ as separator for path/to/test
   IGNORE_TESTS=$(sed "s/\@/\//g" "$IGNORE_TESTS_FILE" 2>/dev/null)

   # Avoid introducing changes while doing this comparison
   TMP_RESULTS=$(mktemp /tmp/virgl_ci_results.XXXXXX)
   TMP_PREV_RESULTS=$(mktemp /tmp/virgl_ci_previous_results.XXXXXX)
   cp "$RESULTS_DIR/results.txt" "$TMP_RESULTS"
   cp "$PREVIOUS_RESULTS_DIR/results.txt" "$TMP_PREV_RESULTS"

   for TEST in $IGNORE_TESTS; do
      sed -i "\:$TEST:d" "$TMP_RESULTS" "$TMP_PREV_RESULTS"
   done

   if [ "$TEST_APP" = "piglit" ]; then
      # This distinction adds too much variability
      sed -i "s/crash/fail/g" "$TMP_RESULTS" "$TMP_PREV_RESULTS"
   elif [ "$TEST_APP" = "deqp" ]; then
      # This distinction adds too much variability
      sed -i "s/QualityWarning/Pass/g" "$TMP_RESULTS" "$TMP_PREV_RESULTS"
      sed -i "s/CompatibilityWarning/Pass/g" "$TMP_RESULTS" "$TMP_PREV_RESULTS"
   fi

   # Sort results files so the diff is stable
   sort -V "$TMP_RESULTS" -o "$TMP_RESULTS"
   sort -V "$TMP_PREV_RESULTS" -o "$TMP_PREV_RESULTS"

   # NOTE: the original used '2>&1 > file', which leaves stderr on the
   # terminal; '> file 2>&1' captures both streams in the diff file.
   diff -u "$TMP_PREV_RESULTS" "$TMP_RESULTS" > "$RESULTS_DIR/regression_diff.txt" 2>&1
   DIFF_RET=$?

   # Don't leak the scratch copies
   rm -f "$TMP_RESULTS" "$TMP_PREV_RESULTS"

   if [ $DIFF_RET -ne 0 ]; then
      touch "$VIRGL_PATH/results/regressions_detected"
      return 1
   else
      # Plain file: -f is enough, no need for -rf
      rm -f "$RESULTS_DIR/regression_diff.txt"
      return 0
   fi
}
141
# Decide pass/fail for one test-suite run and print a human-readable
# verdict.
# Arguments: $1 - number of passed tests
#            $2 - total number of tests run
#            $3 - 1 when this was the "unreliable" test list, else 0
# Globals read: RESULTS_DIR (for the diff/results paths in messages)
# Returns: 0 on pass, 1 on fail
interpret_results()
{
   PASSED_TESTS="$1"
   TOTAL_TESTS="$2"
   UNRELIABLE="$3"

   # TODO: Add comparison for the unreliable tests
   if [ "$UNRELIABLE" -eq 0 ]; then
      compare_previous
      case $? in
         0)
         echo "Pass - matches previous results"
         return 0
         ;;

         1)
         echo "Fail - diff against previous results: $RESULTS_DIR/regression_diff.txt"
         # Count only real additions; '^+[^+]' skips the '+++' file
         # header the old 'grep ^+ | wc -l' wrongly included.
         echo "Changes detected: $(grep -c '^+[^+]' "$RESULTS_DIR/regression_diff.txt")"
         head -n20 "$RESULTS_DIR/regression_diff.txt"
         return 1
         ;;

         2)
         echo "Pass - no previous results, but passed $PASSED_TESTS/$TOTAL_TESTS tests"
         return 0
         ;;

         *)
         echo "BUG!"
         return 1
         ;;
      esac
   else
      # Unreliable runs pass only when every single test passed
      if [ "$PASSED_TESTS" -eq "$TOTAL_TESTS" ] && [ "$TOTAL_TESTS" -ne 0 ]; then
         echo "Pass - passed $PASSED_TESTS/$TOTAL_TESTS tests"
         return 0
      else
         echo "Fail - passed $PASSED_TESTS/$TOTAL_TESTS tests: $RESULTS_DIR/results.txt"
         return 1
      fi
   fi
}
185
# Launch virgl_test_server in the background, logging to VTEST_LOG_FILE.
# The subshell keeps the environment tweaks away from the caller; the
# server inherits them before it is backgrounded.
# Globals read: USE_HOST_GLES, VTEST_LOG_FILE
run_vtest_server()
{
   (
   export VTEST_USE_EGL_SURFACELESS=1
   if [ $USE_HOST_GLES -eq 1 ]; then
      export VTEST_USE_GLES=1
   fi
   virgl_test_server &>$VTEST_LOG_FILE &
   )
}
199
# Run one test suite with $TEST_APP for one backend and interpret the
# outcome.
# Arguments: $1 - backend name (only used to pick the progress message)
#            $2 - test suite name (e.g. gles2)
#            $3 - 1 to run the "unreliable" (ignore-list) tests
#            $4 - file holding the list of test names to run
# Globals read: TEST_APP, DRIVER_NAME, RESULTS_FILE, IGNORE_TESTS_FILE,
#               NUM_THREADS, CTS_PATH, LOG_FILE
# Returns: result of interpret_results (0 pass / 1 fail)
run_test_suite()
{
   local BACKEND="$1"
   local TEST_NAME="$2"
   local UNRELIABLE="$3"
   local LOCAL_TEST_FILE="$4"
   local RES_FILE="$RESULTS_FILE"

   UNRELIABLE_STRING=""
   if [ "$UNRELIABLE" -eq 1 ]; then
      UNRELIABLE_STRING="unreliable "
      RES_FILE="$RES_FILE.unreliable"
   fi

   # Pass the message through %s so driver/test names can never be
   # misinterpreted as printf format directives.
   if [[ $BACKEND == *"vtest"* ]]; then
      printf '%s' "Running ${UNRELIABLE_STRING}$TEST_APP-$TEST_NAME on vtest-$DRIVER_NAME: "
   else
      printf '%s' "Running ${UNRELIABLE_STRING}$TEST_APP-$TEST_NAME on $DRIVER_NAME: "
   fi

   # The unreliable run uses the ignore list as its test list; skip it
   # when the list is missing or empty. (The old 'test ... -o ...'
   # errored out when the file was absent because wc printed nothing.)
   if [ "$UNRELIABLE" -eq 1 ]; then
      LOCAL_TEST_FILE="$IGNORE_TESTS_FILE"
      if [ ! -f "$LOCAL_TEST_FILE" ] || [ "$(wc -l < "$LOCAL_TEST_FILE")" -eq 0 ]; then
         echo "Unreliable: no ignore tests."
         return 0
      fi
   fi

   case $TEST_APP in
   piglit)
      # Don't run GLX tests
      PIGLIT_TESTS=" -x glx"

      if [ "$UNRELIABLE" -eq 1 ]; then
         # XXX: Fold the glx exception?
         PIGLIT_TESTS_CMD="--test-list $LOCAL_TEST_FILE"
      else
         # TODO: create test_file for normal runs
         PIGLIT_TESTS_CMD="$PIGLIT_TESTS -t $TEST_NAME"
      fi

      # PIGLIT_TESTS_CMD is deliberately unquoted: it carries several
      # words of extra arguments.
      EGL_PLATFORM=x11 \
      piglit run --platform x11_egl \
         -l verbose \
         --jobs "$NUM_THREADS" \
         $PIGLIT_TESTS_CMD \
         gpu \
         /tmp/  &> "$LOG_FILE"

      # Keep everything before the "summary:" section as the result list
      piglit summary console /tmp/ | grep -B 999999 "summary:" | grep -v "summary:" > "$RES_FILE"

      piglit summary html "$(dirname "$RES_FILE")/summary" /tmp

      TOTAL_TESTS=$(wc -l < "$RES_FILE")
      PASSED_TESTS=$(grep -c " pass" "$RES_FILE")
      ;;

   deqp)
      deqp \
         --cts-build-dir "$CTS_PATH/build" \
         --test-names-file "$LOCAL_TEST_FILE" \
         --results-file "$RES_FILE" \
         --threads "$NUM_THREADS" &> "$LOG_FILE"

      # Remove header
      sed -i "/#/d" "$RES_FILE"

      # Sort results file to make diffs easier to read
      sort -V "$RES_FILE" -o "$RES_FILE"

      TOTAL_TESTS=$(wc -l < "$RES_FILE")
      PASSED_TESTS=$(grep -c " Pass" "$RES_FILE")
      ;;
   esac

   interpret_results "$PASSED_TESTS" "$TOTAL_TESTS" "$UNRELIABLE"
   return $?
}
283
# Derive all per-run output paths from the current backend/test
# configuration, create the results directory, and export the path
# variables used by the rest of the script.
# Globals read:  GALLIUM_DRIVER, DRIVER_NAME, USE_HOST_GLES, TEST_APP,
#                TEST_NAME, VIRGL_PATH
# Globals set:   RESULTS_DIR, PREVIOUS_RESULTS_DIR, IGNORE_TESTS_FILE
# Exports:       OUTPUT_PATH, RESULTS_FILE, LOG_FILE, VTEST_LOG_FILE
create_result_dir()
{
   # Reset first so a value from a previous backend iteration can't
   # leak into this one when neither driver variable is set.
   HOST_DRIVER=""
   if [[ -n $GALLIUM_DRIVER ]]; then
      HOST_DRIVER="_${GALLIUM_DRIVER}"
   elif [[ -n $DRIVER_NAME ]]; then
      HOST_DRIVER="_${DRIVER_NAME}"
   fi

   if [ "$USE_HOST_GLES" -eq 0 ]; then
      HOST_GL="gl"
   else
      HOST_GL="es"
   fi

   TEST_PATH=${HOST_GL}_host${HOST_DRIVER}/${TEST_APP}_${TEST_NAME}
   RESULTS_DIR=$VIRGL_PATH/results/${TEST_PATH}

   # Only the software rasterizers have maintained reference results
   if [ "$HOST_DRIVER" = "_softpipe" ] || [ "$HOST_DRIVER" = "_llvmpipe" ]; then
      PREVIOUS_RESULTS_DIR=$VIRGL_PATH/ci/previous_results/${TEST_PATH}
   else
      echo "WARNING: Results are not up-to-date!"
      PREVIOUS_RESULTS_DIR=$VIRGL_PATH/ci/previous_results/archived/${TEST_PATH}
   fi

   IGNORE_TESTS_FILE=$PREVIOUS_RESULTS_DIR/ignore_tests.txt

   # Remove comments and blank lines from the ignore list
   FILTERED_TEST_FILE=$(mktemp /tmp/virgl-ci.XXXXX)
   sed '/^#/d;/^$/d' "$IGNORE_TESTS_FILE" 2>/dev/null > "$FILTERED_TEST_FILE"
   IGNORE_TESTS_FILE=$FILTERED_TEST_FILE

   mkdir -p "$RESULTS_DIR"

   export OUTPUT_PATH="${RESULTS_DIR}"
   export RESULTS_FILE="${OUTPUT_PATH}/results.txt"
   export LOG_FILE="${OUTPUT_PATH}/log.txt"
   export VTEST_LOG_FILE="${OUTPUT_PATH}/vtest_log.txt"
}
322
# Run the given test suite once per requested backend.
# Arguments: $1 - space-separated backend list
#            $2 - test suite name
#            $3 - file with the list of tests to run
# Returns: 0 when every reliable run passed, 1 otherwise
run_test_on_backends()
{
   local BACKENDS="$1"
   local TEST_NAME="$2"
   local TEST_FILE="$3"
   local RET=0

   for BACKEND in $BACKENDS; do
      # Start each backend from a clean environment
      unset DRIVER_NAME
      unset GALLIUM_DRIVER
      unset GALLIVM_PERF
      unset LIBGL_ALWAYS_SOFTWARE
      unset VTEST_USE_EGL_SURFACELESS

      case $BACKEND in
         vtest-softpipe|softpipe)
            export LIBGL_ALWAYS_SOFTWARE=1
            export GALLIUM_DRIVER=softpipe
            export DRIVER_NAME=$GALLIUM_DRIVER
            ;;

         vtest-llvmpipe|llvmpipe)
            export GALLIVM_PERF=nopt,no_filter_hacks
            export LIBGL_ALWAYS_SOFTWARE=1
            export GALLIUM_DRIVER=llvmpipe
            export DRIVER_NAME=$GALLIUM_DRIVER
            ;;

         vtest-gpu|gpu)
            # Resolve the kernel driver bound to the render node
            # ($(...) replaces the old nested backticks).
            DEVICE_NAME=$(basename /dev/dri/renderD128)
            export DRIVER_NAME="$(basename "$(readlink "/sys/class/drm/${DEVICE_NAME}/device/driver")")"
            ;;
      esac

      # This case statement is broken into two parts because for the
      # second part LOG_FILE has to be declared first, which is needed
      # to redirect FDs
      create_result_dir

      case $BACKEND in
         vtest-*)
            run_vtest_server
            export GALLIUM_DRIVER=virpipe
            ;;

         *)
            ;;
      esac

      # Execute both mustpass and unstable tests
      # Only the former trigger an overall run fail
      run_test_suite "$BACKEND" "$TEST_NAME" 0 "$TEST_FILE"
      if [ $? -ne 0 ]; then
         RET=1
      fi

      run_test_suite "$BACKEND" "$TEST_NAME" 1 "$TEST_FILE"

      killall -q virgl_test_server
   done

   return $RET
}
390
# Run every requested test suite on every requested backend.
# Arguments: $1 - space-separated backend list
#            $2 - space-separated test suite list
# Globals read: USE_HOST_GLES, CTS_PATH
# NOTE: this function exits the shell with the overall status instead
# of returning; it is expected to be the script's final action.
run_all_tests()
{
   local BACKENDS="$1"
   local TESTS="$2"
   local RET=0

   if [ "$USE_HOST_GLES" -eq 0 ]; then
      echo "Running test(s) on GL Host"
      echo "--------------------------"
   else
      echo "Running test(s) on GLES Host"
      echo "----------------------------"
   fi

   # TODO: add similar must pass lists for piglit
   for TEST in $TESTS; do
      # Pick the CTS mustpass list matching the suite name
      case $TEST in
      gles2|gles3|gles31)
         TEST_FILE="$CTS_PATH/android/cts/master/$TEST-master.txt"
         ;;
      gl30|gl31|gl32)
         TEST_FILE="$CTS_PATH/external/openglcts/data/mustpass/gl/khronos_mustpass/4.6.1.x/$TEST-master.txt"
         ;;
      esac

      run_test_on_backends "$BACKENDS" "$TEST" "$TEST_FILE"

      if [ $? -ne 0 ]; then
         RET=1
      fi
   done

   exit $RET
}
428
# Bring up a headless X server and point subsequent clients at it.
setup()
{
   export DISPLAY=:0
   Xvfb "$DISPLAY" -screen 0 1024x768x24 &>/dev/null &
   # Crude wait for the X server to start accepting connections
   sleep 2
}
435
# Script entry point: prepare the display, parse flags, run everything.
setup
# "$@" (quoted) keeps arguments with spaces intact; the old bare $@
# re-split them.
parse_input "$@"
run_all_tests "$BACKENDS" "$TESTS"
439