
[LibOS, Pal] Reorganize unit tests

- Deduplicate much of the setup code.
- Allow running non-sandbox and sandbox code in a single run.
- Use pytest.
- Generate JUnit-XML file for Jenkins.
- Document running a subset of regression tests.
Wojtek Porczyk, 5 years ago
commit 01f3553a25
45 changed files with 1026 additions and 1429 deletions
  1. CONTRIBUTING.md (+16 -2)
  2. Jenkinsfiles/ubuntu-16.04.dockerfile (+1 -0)
  3. Jenkinsfiles/ubuntu-18.04.dockerfile (+1 -0)
  4. LibOS/shim/test/regression/00_bootstrap.py (+0 -37)
  5. LibOS/shim/test/regression/00_openmp.py (+0 -20)
  6. LibOS/shim/test/regression/30_fopen_cornercases.py (+0 -13)
  7. LibOS/shim/test/regression/30_futex.py (+0 -22)
  8. LibOS/shim/test/regression/30_getcwd.py (+0 -14)
  9. LibOS/shim/test/regression/30_getdents.py (+0 -30)
  10. LibOS/shim/test/regression/30_mmap.py (+0 -29)
  11. LibOS/shim/test/regression/30_sigaltstack.py (+0 -29)
  12. LibOS/shim/test/regression/30_stat.py (+0 -25)
  13. LibOS/shim/test/regression/30_syscall-redirect.py (+0 -20)
  14. LibOS/shim/test/regression/40_proc.py (+0 -24)
  15. LibOS/shim/test/regression/40_proc_cpuinfo.py (+0 -12)
  16. LibOS/shim/test/regression/40_proc_path.py (+0 -13)
  17. LibOS/shim/test/regression/80_epoll_wait_timeout.py (+0 -13)
  18. LibOS/shim/test/regression/80_sockets.py (+0 -13)
  19. LibOS/shim/test/regression/80_udp.py (+0 -24)
  20. LibOS/shim/test/regression/80_unix.py (+0 -24)
  21. LibOS/shim/test/regression/90_large-mmap.py (+0 -17)
  22. LibOS/shim/test/regression/Makefile (+8 -19)
  23. LibOS/shim/test/regression/test_libos.py (+314 -0)
  24. Pal/regression/00_Atomics.py (+0 -23)
  25. Pal/regression/00_Bootstrap.py (+0 -186)
  26. Pal/regression/00_Symbols.py (+0 -70)
  27. Pal/regression/01_Exception.py (+0 -25)
  28. Pal/regression/02_Directory.py (+0 -57)
  29. Pal/regression/02_Event.py (+0 -15)
  30. Pal/regression/02_File.py (+0 -62)
  31. Pal/regression/02_Memory.py (+0 -38)
  32. Pal/regression/02_Misc.py (+0 -38)
  33. Pal/regression/02_Pipe.py (+0 -24)
  34. Pal/regression/02_Semaphore.py (+0 -18)
  35. Pal/regression/02_Socket.py (+0 -39)
  36. Pal/regression/02_Thread.py (+0 -45)
  37. Pal/regression/03_Process.py (+0 -59)
  38. Pal/regression/04_Ipc.py (+0 -68)
  39. Pal/regression/04_SendHandle.py (+0 -29)
  40. Pal/regression/05_Process.py (+0 -50)
  41. Pal/regression/05_Reference_Monitor.py (+0 -61)
  42. Pal/regression/06_AvxDisable.py (+0 -23)
  43. Pal/regression/Makefile (+7 -18)
  44. Pal/regression/test_pal.py (+625 -0)
  45. Scripts/regression.py (+54 -81)
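
Scripts/regression.py is where the deduplicated setup now lives. Its new contents are not reproduced on this page, but the imports and calls in test_libos.py below imply roughly the following interface. This is a hedged sketch reconstructed from that usage, not the committed implementation; the default timeout and other internals are assumptions.

```python
# Hedged sketch of what Scripts/regression.py appears to expose, inferred
# from its usage in test_libos.py below; internals (default timeout, loader
# selection) are assumptions, not the committed code.
import os
import subprocess
import unittest
from contextlib import contextmanager

HAS_SGX = os.environ.get('SGX_RUN') == '1'

def expectedFailureIf(predicate):
    # Apply unittest.expectedFailure only when `predicate` is true.
    return unittest.expectedFailure if predicate else (lambda func: func)

class RegressionTestCase(unittest.TestCase):
    DEFAULT_TIMEOUT = 1000 if HAS_SGX else 10  # seconds; assumed default

    def run_binary(self, args, *, timeout=None):
        # Run the test binary through the PAL loader named by the environment
        # (see the Makefile exports below) and return decoded (stdout, stderr).
        # A nonzero exit raises subprocess.CalledProcessError.
        loader = os.environ['PAL_LOADER']
        proc = subprocess.run([loader, *args],
                              timeout=timeout or self.DEFAULT_TIMEOUT,
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                              check=True)
        return proc.stdout.decode(), proc.stderr.decode()

    @contextmanager
    def expect_returncode(self, returncode):
        # Assert that the wrapped run_binary() exits with exactly `returncode`.
        with self.assertRaises(subprocess.CalledProcessError) as ctx:
            yield
        self.assertEqual(ctx.exception.returncode, returncode)

class SandboxTestCase(RegressionTestCase):
    # Presumably drives the reference-monitor loader (PAL_SEC) instead of
    # PAL_LOADER; only the class name is confirmed by the import below.
    pass
```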

+ 16 - 2
CONTRIBUTING.md

@@ -115,9 +115,23 @@ make SGX_RUN=1 regression
 
 If a test fails unexpectedly, one can use the KEEP_LOG=1 option to get the complete output.
 
-One can also run individual tests, such as Bootstrap, as:
+One can run tests manually:
+
+```sh
+PYTHONPATH=path/to/graphene/Scripts
+PAL_LOADER=path/to/pal-Linux
+PAL_SEC=path/to/pal_sec-Linux
+export PYTHONPATH PAL_LOADER PAL_SEC
+python3 -m pytest -v -rs test.py
+```
+
+It is also possible to run a subset of tests:
 
-```path/to/pal-Linux ./Bootstrap```
+```sh
+# after env export
+python3 -m pytest -v -rs test.py::TC_01_Bootstrap
+python3 -m pytest -v -rs test.py::TC_01_Bootstrap::test_100_basic_bootstrapping
+```
 
 The shim unit tests work similarly, and are under LibOS/shim/test/regression
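
Besides selecting tests by node id, pytest's standard `-k` option filters by keyword, which is handy when the exact class or method name is not known. A generic example, assuming the same environment exports as above (plain pytest behavior, nothing specific to this tree):

```sh
# Select every test whose id contains "Bootstrap".
python3 -m pytest -v -rs -k Bootstrap test.py
```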
 

+ 1 - 0
Jenkinsfiles/ubuntu-16.04.dockerfile

@@ -18,6 +18,7 @@ RUN apt-get update \
        python \
        python-protobuf \
        python3-minimal \
+       python3-pytest \
        texinfo \
        wget \
        libomp-dev \

+ 1 - 0
Jenkinsfiles/ubuntu-18.04.dockerfile

@@ -17,6 +17,7 @@ RUN apt-get update && apt-get install -y \
     net-tools \
     python \
     python-protobuf \
+    python3-pytest \
     texinfo \
     wget
 

File diff suppressed because it is too large
+ 0 - 37
LibOS/shim/test/regression/00_bootstrap.py


+ 0 - 20
LibOS/shim/test/regression/00_openmp.py

@@ -1,20 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-sgx = os.environ.get('SGX_RUN') == '1'
-
-# This test is only meaningful on SGX PAL because only SGX catches raw syscalls
-# and redirects to Graphene's LibOS. If we will add seccomp to Linux PAL, then
-# we should allow this test on Linux PAL as well.
-if not sgx:
-    sys.exit(0)
-
-# Running OpenMP
-regression = Regression(loader, "openmp")
-
-regression.add_check(name="OpenMP simple for loop",
-    check=lambda res: "first: 0, last: 9" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 13
LibOS/shim/test/regression/30_fopen_cornercases.py

@@ -1,13 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running Long Filepath Example
-regression = Regression(loader, "fopen_cornercases")
-
-regression.add_check(name="fopen corner cases",
-        check=lambda res: "Successfully read from file: Hello World" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 22
LibOS/shim/test/regression/30_futex.py

@@ -1,22 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running futex
-regression = Regression(loader, "futex")
-
-regression.add_check(name="Futex Wake Test",
-    check=lambda res: "Woke all kiddos" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-# Running futex-timeout
-regression = Regression(loader, "futex-timeout")
-
-regression.add_check(name="Futex Timeout Test",
-    check=lambda res: "futex correctly timed out" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 14
LibOS/shim/test/regression/30_getcwd.py

@@ -1,14 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running stat
-regression = Regression(loader, "getcwd")
-
-regression.add_check(name="Getcwd syscall",
-    check=lambda res: "[bss_cwd_buf] getcwd succeeded: /" in res[0].out and \
-                      "[mmapped_cwd_buf] getcwd succeeded: /" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 30
LibOS/shim/test/regression/30_getdents.py

@@ -1,30 +0,0 @@
-#!/usr/bin/env python2
-
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running Bootstrap
-regression = Regression(loader, "getdents", None, 10000)
-
-# This doesn't catch extraneous entries, but should be fine
-# until the LTP test can be run (need symlink support)
-regression.add_check(name="Directory listing (32-bit)",
-    check=lambda res: "getdents: setup ok" in res[0].out and \
-                      "getdents32: . [0x4]" in res[0].out and \
-                      "getdents32: .. [0x4]" in res[0].out and \
-                      "getdents32: file1 [0x8]" in res[0].out and \
-                      "getdents32: file2 [0x8]" in res[0].out and \
-                      "getdents32: dir3 [0x4]" in res[0].out)
-
-regression.add_check(name="Directory listing (64-bit)",
-    check=lambda res: "getdents: setup ok" in res[0].out and \
-                      "getdents64: . [0x4]" in res[0].out and \
-                      "getdents64: .. [0x4]" in res[0].out and \
-                      "getdents64: file1 [0x8]" in res[0].out and \
-                      "getdents64: file2 [0x8]" in res[0].out and \
-                      "getdents64: dir3 [0x4]" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 29
LibOS/shim/test/regression/30_mmap.py

@@ -1,29 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-sgx = os.environ.get('SGX_RUN') == '1'
-
-# Running Bootstrap
-regression = Regression(loader, "mmap-file", None, 60000)
-
-regression.add_check(name="Private mmap beyond file range",
-    check=lambda res: "mmap test 6 passed" in res[0].out and \
-                      "mmap test 7 passed" in res[0].out)
-
-regression.add_check(name="Private mmap beyond file range (after fork)",
-    check=lambda res: "mmap test 1 passed" in res[0].out and \
-                      "mmap test 2 passed" in res[0].out and \
-                      "mmap test 3 passed" in res[0].out and \
-                      "mmap test 4 passed" in res[0].out)
-
-# On SGX, SIGBUS isn't always implemented correctly, for lack
-# of memory protection.  For now, some of these cases won't work.
-if not sgx:
-    regression.add_check(name="SIGBUS test",
-                         check=lambda res: "mmap test 5 passed" in res[0].out and \
-                         "mmap test 8 passed" in res[0].out)
-
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 29
LibOS/shim/test/regression/30_sigaltstack.py

@@ -1,29 +0,0 @@
-import sys
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running sigaltstack
-regression = Regression(loader, "sigaltstack")
-
-messages = (
-    "OK on sigaltstack in main thread before alarm",
-    "&act == 0x",
-    "sig 14 count 1 goes off with sp=0x",
-    "OK on signal stack",
-    "OK on sigaltstack in handler",
-    "sig 14 count 2 goes off with sp=0x",
-    "OK on signal stack",
-    "OK on sigaltstack in handler",
-    "sig 14 count 3 goes off with sp=0x",
-    "OK on signal stack",
-    "OK on sigaltstack in handler",
-    "OK on sigaltstack in main thread",
-    "done exiting",
-)
-
-regression.add_check(name="Sigaltstack Test",
-    check=lambda res: all([x in res[0].out for x in messages]))
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 25
LibOS/shim/test/regression/30_stat.py

@@ -1,25 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running stat
-regression = Regression(loader, "stat_invalid_args")
-
-regression.add_check(name="Stat with invalid arguments",
-    check=lambda res: "stat(invalid-path-ptr) correctly returned error" in res[0].out and \
-                      "stat(invalid-buf-ptr) correctly returned error" in res[0].out and \
-                      "lstat(invalid-path-ptr) correctly returned error" in res[0].out and \
-                      "lstat(invalid-buf-ptr) correctly returned error" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-# Running fstat
-regression = Regression(loader, "fstat_cwd")
-
-regression.add_check(name="Fstat on a directory",
-    check=lambda res: "fstat returned the fd type as S_IFDIR" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 20
LibOS/shim/test/regression/30_syscall-redirect.py

@@ -1,20 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-sgx = os.environ.get('SGX_RUN') == '1'
-
-# This test is only meaningful on SGX PAL because only SGX catches raw syscalls
-# and redirects to Graphene's LibOS. If we will add seccomp to Linux PAL, then
-# we should allow this test on Linux PAL as well.
-if not sgx:
-    sys.exit(0)
-
-# Running Syscall Instruction Example
-regression = Regression(loader, "syscall")
-
-regression.add_check(name="Syscall Instruction Redirection",
-    check=lambda res: "Hello world" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 24
LibOS/shim/test/regression/40_proc.py

@@ -1,24 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running Bootstrap
-regression = Regression(loader, "proc")
-
-regression.add_check(name="Base /proc files present",
-    check=lambda res: "/proc/1/.." in res[0].out and \
-                      "/proc/1/cwd" in res[0].out and \
-                      "/proc/1/exe" in res[0].out and \
-                      "/proc/1/root" in res[0].out and \
-                      "/proc/1/fd" in res[0].out and \
-                      "/proc/1/maps" in res[0].out and \
-                      "/proc/." in res[0].out and \
-                      "/proc/1" in res[0].out and \
-                      "/proc/self" in res[0].out and \
-                      "/proc/meminfo" in res[0].out and \
-                      "/proc/cpuinfo" in res[0].out)
-
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 12
LibOS/shim/test/regression/40_proc_cpuinfo.py

@@ -1,12 +0,0 @@
-import sys
-from regression import Regression
-
-loader = sys.argv[1]
-
-regression = Regression(loader, "proc_cpuinfo", None, 50000)
-
-regression.add_check(name="proc/cpuinfo Linux-based formatting",
-    check=lambda res: "cpuinfo test passed" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 13
LibOS/shim/test/regression/40_proc_path.py

@@ -1,13 +0,0 @@
-import os, sys
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running Bootstrap
-regression = Regression(loader, "proc-path")
-
-regression.add_check(name="Base /proc path present",
-    check=lambda res: "proc path test success" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 13
LibOS/shim/test/regression/80_epoll_wait_timeout.py

@@ -1,13 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-
-regression = Regression(loader, "epoll_wait_timeout", None, 50000)
-
-regression.add_check(name="epoll_wait timeout",
-    args = ['8000'],
-    check=lambda res: "epoll_wait test passed" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 13
LibOS/shim/test/regression/80_sockets.py

@@ -1,13 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running getsockopt
-regression = Regression(loader, "getsockopt", None)
-
-regression.add_check(name="getsockopt",
-    check=lambda res: "getsockopt: Got socket type OK" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 24
LibOS/shim/test/regression/80_udp.py

@@ -1,24 +0,0 @@
-import sys
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running udp
-regression = Regression(loader, "udp", None, 50000)
-
-regression.add_check(name="udp",
-    check=lambda res:
-      "Data: This is packet 0" in res[0].out and
-      "Data: This is packet 1" in res[0].out and
-      "Data: This is packet 2" in res[0].out and
-      "Data: This is packet 3" in res[0].out and
-      "Data: This is packet 4" in res[0].out and
-      "Data: This is packet 5" in res[0].out and
-      "Data: This is packet 6" in res[0].out and
-      "Data: This is packet 7" in res[0].out and
-      "Data: This is packet 8" in res[0].out and
-      "Data: This is packet 9" in res[0].out)
-
-rv = regression.run_checks()
-if rv:
-    sys.exit(rv)

+ 0 - 24
LibOS/shim/test/regression/80_unix.py

@@ -1,24 +0,0 @@
-import sys
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running udp
-regression = Regression(loader, "unix", None)
-
-regression.add_check(name="Unix domain socket",
-    check=lambda res:
-      "Data: This is packet 0" in res[0].out and
-      "Data: This is packet 1" in res[0].out and
-      "Data: This is packet 2" in res[0].out and
-      "Data: This is packet 3" in res[0].out and
-      "Data: This is packet 4" in res[0].out and
-      "Data: This is packet 5" in res[0].out and
-      "Data: This is packet 6" in res[0].out and
-      "Data: This is packet 7" in res[0].out and
-      "Data: This is packet 8" in res[0].out and
-      "Data: This is packet 9" in res[0].out)
-
-rv = regression.run_checks()
-if rv:
-    sys.exit(rv)

+ 0 - 17
LibOS/shim/test/regression/90_large-mmap.py

@@ -1,17 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = sys.argv[1]
-
-# Running Bootstrap
-regression = Regression(loader, "large-mmap", None, 240000)
-
-regression.add_check(name="Ftruncate",
-    check=lambda res: "large-mmap: ftruncate OK" in res[0].out)
-
-regression.add_check(name="Large mmap",
-    check=lambda res: "large-mmap: mmap 1 completed OK" in res[0].out and \
-                     "large-mmap: mmap 2 completed OK" in res[0].out)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 8 - 19
LibOS/shim/test/regression/Makefile

@@ -54,28 +54,17 @@ else
 $(special_executables) $(c_executables) $(cxx_executables):
 endif
 
-PYTHONENV="PYTHONPATH=../../../../Scripts"
+export PAL_LOADER = $(RUNTIME)/pal-$(PAL_HOST)
+export PAL_SEC = $(RUNTIME)/pal_sec-$(PAL_HOST)
+export PYTHONPATH=../../../../Scripts
 
-ifeq ($(SGX_RUN),1)
-	PYTHONENV += "TIMEOUT=20000"
-endif
-
-# 30_sigaltstack.py fails because sigaltstack isn't correctly implemented.
-BLOCKED_TESTS = \
-	30_sigaltstack.py
-get_tests = $(filter-out $(BLOCKED_TESTS),$(wildcard $(1)))
 .PHONY: regression
 regression: $(target)
-	@echo "\n\nBasic Bootstrapping:"
-	@for f in $(call get_tests,00_*.py); do env $(PYTHONENV) python3 $$f $(RUNTIME)/pal-$(PAL_HOST) || exit $$?; done
-	@echo "\n\nSyscall Support:"
-	@for f in $(call get_tests,30_*.py); do env $(PYTHONENV) python3 $$f $(RUNTIME)/pal-$(PAL_HOST) || exit $$?; done
-	@echo "\n\nFile System Support:"
-	@for f in $(call get_tests,40_*.py); do env $(PYTHONENV) python3 $$f $(RUNTIME)/pal-$(PAL_HOST) || exit $$?; done
-	@echo "\n\nSocket Support:"
-	@for f in $(call get_tests,80_*.py); do env $(PYTHONENV) python3 $$f $(RUNTIME)/pal-$(PAL_HOST) || exit $$?; done
-	@echo "\n\nLarge File Support:"
-	@for f in $(call get_tests,90_*.py); do env $(PYTHONENV) python3 $$f $(RUNTIME)/pal-$(PAL_HOST) || exit $$?; done
+	$(RM) libos-regression.xml
+	$(MAKE) libos-regression.xml
+
+libos-regression.xml:
+	python3 -m pytest --junit-xml $@ -v test_libos.py
 
 .PHONY: clean-tmp
 clean-tmp:
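
With the per-file loops gone, the regression target just regenerates the JUnit-XML report through pytest. A usage sketch, with SGX_RUN=1 selecting the SGX PAL as documented in CONTRIBUTING.md; the Makefile exports PAL_LOADER, PAL_SEC, and PYTHONPATH for the pytest process:

```sh
# from LibOS/shim/test/regression/
make regression               # Linux PAL
make SGX_RUN=1 regression     # SGX PAL
# which boils down to the pytest call shown in the Makefile above:
python3 -m pytest --junit-xml libos-regression.xml -v test_libos.py
```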

+ 314 - 0
LibOS/shim/test/regression/test_libos.py

@@ -0,0 +1,314 @@
+#!/usr/bin/env python3
+
+import mmap
+import os
+import sys
+import unittest
+import subprocess
+
+from regression import (
+    HAS_SGX,
+    RegressionTestCase,
+    SandboxTestCase,
+    expectedFailureIf,
+)
+
+class TC_00_Bootstrap(RegressionTestCase):
+    def test_100_basic_bootstrapping(self):
+        stdout, stderr = self.run_binary(['bootstrap'])
+
+        # Basic Bootstrapping
+        self.assertIn('User Program Started', stdout)
+
+        # One Argument Given
+        self.assertIn('# of Arguments: 1', stdout)
+        self.assertIn('argv[0] = file:bootstrap', stdout)
+
+
+    def test_101_basic_bootstrapping_five_arguments(self):
+        # Five Arguments Given
+        stdout, stderr = self.run_binary(['bootstrap', 'a', 'b', 'c', 'd'])
+        self.assertIn('# of Arguments: 5', stdout)
+        self.assertIn('argv[0] = file:bootstrap', stdout)
+        self.assertIn('argv[1] = a', stdout)
+        self.assertIn('argv[2] = b', stdout)
+        self.assertIn('argv[3] = c', stdout)
+        self.assertIn('argv[4] = d', stdout)
+
+    def test_110_basic_bootstrapping_cxx(self):
+        stdout, stderr = self.run_binary(['bootstrap-c++'])
+
+        # Basic Bootstrapping (C++)
+        self.assertIn('User Program Started', stdout)
+
+    def test_200_exec(self):
+        stdout, stderr = self.run_binary(['exec'])
+
+        # 2 page child binary
+        self.assertIn(
+            '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 '
+            '000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 ',
+            stdout)
+
+    def test_201_fork_and_exec(self):
+        stdout, stderr = self.run_binary(['fork_and_exec'])
+
+        # fork and exec 2 page child binary
+        self.assertIn('child exited with status: 0', stdout)
+        self.assertIn('test completed successfully', stdout)
+
+    def test_202_vfork_and_exec(self):
+        stdout, stderr = self.run_binary(['vfork_and_exec'])
+
+        # vfork and exec 2 page child binary
+        self.assertIn('child exited with status: 0', stdout)
+        self.assertIn('test completed successfully', stdout)
+
+    def test_210_exec_invalid_args(self):
+        stdout, stderr = self.run_binary(['exec_invalid_args'])
+
+        # Execve with invalid pointers in arguments
+        self.assertIn(
+            'execve(invalid-path) correctly returned error', stdout)
+        self.assertIn(
+            'execve(invalid-argv-ptr) correctly returned error', stdout)
+        self.assertIn(
+            'execve(invalid-envp-ptr) correctly returned error', stdout)
+        self.assertIn(
+            'execve(invalid-argv) correctly returned error', stdout)
+        self.assertIn(
+            'execve(invalid-envp) correctly returned error', stdout)
+
+    def test_300_shared_object(self):
+        stdout, stderr = self.run_binary(['shared_object'])
+
+        # Shared Object
+        self.assertIn('Hello world', stdout)
+
+    def test_400_exit(self):
+        with self.expect_returncode(113):
+            self.run_binary(['exit'])
+
+    def test_500_init_fail(self):
+        try:
+            self.run_binary(['init_fail'])
+            self.fail('expected to return nonzero (and != 42)')
+        except subprocess.CalledProcessError as e:
+            self.assertNotEqual(e.returncode, 42, 'expected returncode != 42')
+
+@unittest.skipUnless(HAS_SGX,
+    'This test is only meaningful on SGX PAL because only SGX catches raw '
+    'syscalls and redirects to Graphene\'s LibOS. If we will add seccomp to '
+    'Linux PAL, then we should allow this test on Linux PAL as well.')
+class TC_01_OpenMP(RegressionTestCase):
+    def test_000_simple_for_loop(self):
+        stdout, stderr = self.run_binary(['openmp'])
+
+        # OpenMP simple for loop
+        self.assertIn('first: 0, last: 9', stdout)
+
+class TC_30_Syscall(RegressionTestCase):
+    def test_000_getcwd(self):
+        stdout, stderr = self.run_binary(['getcwd'])
+
+        # Getcwd syscall
+        self.assertIn('[bss_cwd_buf] getcwd succeeded: /', stdout)
+        self.assertIn('[mmapped_cwd_buf] getcwd succeeded: /', stdout)
+
+    def test_010_stat_invalid_args(self):
+        stdout, stderr = self.run_binary(['stat_invalid_args'])
+
+        # Stat with invalid arguments
+        self.assertIn('stat(invalid-path-ptr) correctly returned error', stdout)
+        self.assertIn('stat(invalid-buf-ptr) correctly returned error', stdout)
+        self.assertIn('lstat(invalid-path-ptr) correctly returned error', stdout)
+        self.assertIn('lstat(invalid-buf-ptr) correctly returned error', stdout)
+
+    def test_011_fstat_cwd(self):
+        stdout, stderr = self.run_binary(['fstat_cwd'])
+
+        # fstat on a directory
+        self.assertIn('fstat returned the fd type as S_IFDIR', stdout)
+
+    def test_020_getdents(self):
+        # This doesn't catch extraneous entries, but should be fine
+        # until the LTP test can be run (need symlink support)
+
+        stdout, stderr = self.run_binary(['getdents'])
+        self.assertIn('getdents: setup ok', stdout)
+
+        # Directory listing (32-bit)
+        self.assertIn('getdents32: . [0x4]', stdout)
+        self.assertIn('getdents32: .. [0x4]', stdout)
+        self.assertIn('getdents32: file1 [0x8]', stdout)
+        self.assertIn('getdents32: file2 [0x8]', stdout)
+        self.assertIn('getdents32: dir3 [0x4]', stdout)
+
+        # Directory listing (64-bit)
+        self.assertIn('getdents64: . [0x4]', stdout)
+        self.assertIn('getdents64: .. [0x4]', stdout)
+        self.assertIn('getdents64: file1 [0x8]', stdout)
+        self.assertIn('getdents64: file2 [0x8]', stdout)
+        self.assertIn('getdents64: dir3 [0x4]', stdout)
+
+    def test_030_fopen(self):
+        stdout, stderr = self.run_binary(['fopen_cornercases'])
+
+        # fopen corner cases
+        self.assertIn('Successfully read from file: Hello World', stdout)
+
+    def test_040_futex_wake(self):
+        stdout, stderr = self.run_binary(['futex'])
+
+        # Futex Wake Test
+        self.assertIn('Woke all kiddos', stdout)
+
+    def test_041_futex_timeout(self):
+        stdout, stderr = self.run_binary(['futex-timeout'])
+
+        # Futex Timeout Test
+        self.assertIn('futex correctly timed out', stdout)
+
+    def test_050_mmap(self):
+        stdout, stderr = self.run_binary(['mmap-file'], timeout=60)
+
+        # Private mmap beyond file range
+        self.assertIn('mmap test 6 passed', stdout)
+        self.assertIn('mmap test 7 passed', stdout)
+
+        # Private mmap beyond file range (after fork)
+        self.assertIn('mmap test 1 passed', stdout)
+        self.assertIn('mmap test 2 passed', stdout)
+        self.assertIn('mmap test 3 passed', stdout)
+        self.assertIn('mmap test 4 passed', stdout)
+
+    @unittest.skipIf(HAS_SGX,
+        'On SGX, SIGBUS isn\'t always implemented correctly, for lack '
+        'of memory protection. For now, some of these cases won\'t work.')
+    def test_051_mmap_sgx(self):
+        stdout, stderr = self.run_binary(['mmap-file'], timeout=60)
+
+        # SIGBUS test
+        self.assertIn('mmap test 5 passed', stdout)
+        self.assertIn('mmap test 8 passed', stdout)
+
+    def test_52_large_mmap(self):
+        stdout, stderr = self.run_binary(['large-mmap'], timeout=240)
+
+        # Ftruncate
+        self.assertIn('large-mmap: ftruncate OK', stdout)
+
+        # Large mmap
+        self.assertIn('large-mmap: mmap 1 completed OK', stdout)
+        self.assertIn('large-mmap: mmap 2 completed OK', stdout)
+
+    @unittest.skip('sigaltstack isn\'t correctly implemented')
+    def test_060_sigaltstack(self):
+        stdout, stderr = self.run_binary(['sigaltstack'])
+
+        # Sigaltstack Test
+        self.assertIn('OK on sigaltstack in main thread before alarm', stdout)
+        self.assertIn('&act == 0x', stdout)
+        self.assertIn('sig 14 count 1 goes off with sp=0x', stdout)
+        self.assertIn('OK on signal stack', stdout)
+        self.assertIn('OK on sigaltstack in handler', stdout)
+        self.assertIn('sig 14 count 2 goes off with sp=0x', stdout)
+        self.assertIn('OK on signal stack', stdout)
+        self.assertIn('OK on sigaltstack in handler', stdout)
+        self.assertIn('sig 14 count 3 goes off with sp=0x', stdout)
+        self.assertIn('OK on signal stack', stdout)
+        self.assertIn('OK on sigaltstack in handler', stdout)
+        self.assertIn('OK on sigaltstack in main thread', stdout)
+        self.assertIn('done exiting', stdout)
+
+@unittest.skipUnless(HAS_SGX,
+    'This test is only meaningful on SGX PAL because only SGX catches raw '
+    'syscalls and redirects to Graphene\'s LibOS. If we will add seccomp to '
+    'Linux PAL, then we should allow this test on Linux PAL as well.')
+class TC_31_SyscallSGX(RegressionTestCase):
+    def test_000_syscall_redirect(self):
+        stdout, stderr = self.run_binary(['syscall'])
+
+        # Syscall Instruction Redirection
+        self.assertIn('Hello world', stdout)
+
+class TC_40_FileSystem(RegressionTestCase):
+    def test_000_base(self):
+        stdout, stderr = self.run_binary(['proc'])
+
+        # Base /proc files present
+        self.assertIn('/proc/1/..', stdout)
+        self.assertIn('/proc/1/cwd', stdout)
+        self.assertIn('/proc/1/exe', stdout)
+        self.assertIn('/proc/1/root', stdout)
+        self.assertIn('/proc/1/fd', stdout)
+        self.assertIn('/proc/1/maps', stdout)
+        self.assertIn('/proc/.', stdout)
+        self.assertIn('/proc/1', stdout)
+        self.assertIn('/proc/self', stdout)
+        self.assertIn('/proc/meminfo', stdout)
+        self.assertIn('/proc/cpuinfo', stdout)
+
+    def test_010_path(self):
+        stdout, stderr = self.run_binary(['proc-path'])
+
+        # Base /proc path present
+        self.assertIn('proc path test success', stdout)
+
+    def test_020_cpuinfo(self):
+        stdout, stderr = self.run_binary(['proc_cpuinfo'], timeout=50)
+
+        # proc/cpuinfo Linux-based formatting
+        self.assertIn('cpuinfo test passed', stdout)
+
+class TC_80_Socket(RegressionTestCase):
+    def test_000_getsockopt(self):
+        stdout, stderr = self.run_binary(['getsockopt'])
+        self.assertIn('getsockopt: Got socket type OK', stdout)
+
+    def test_010_epoll_wait_timeout(self):
+        stdout, stderr = self.run_binary(['epoll_wait_timeout', '8000'],
+            timeout=50)
+
+        # epoll_wait timeout
+        self.assertIn('epoll_wait test passed', stdout)
+
+    def test_100_socket_unix(self):
+        stdout, stderr = self.run_binary(['unix'])
+        self.assertIn('Data: This is packet 0', stdout)
+        self.assertIn('Data: This is packet 1', stdout)
+        self.assertIn('Data: This is packet 2', stdout)
+        self.assertIn('Data: This is packet 3', stdout)
+        self.assertIn('Data: This is packet 4', stdout)
+        self.assertIn('Data: This is packet 5', stdout)
+        self.assertIn('Data: This is packet 6', stdout)
+        self.assertIn('Data: This is packet 7', stdout)
+        self.assertIn('Data: This is packet 8', stdout)
+        self.assertIn('Data: This is packet 9', stdout)
+
+    def test_200_socket_udp(self):
+        stdout, stderr = self.run_binary(['udp'], timeout=50)
+        self.assertIn('Data: This is packet 0', stdout)
+        self.assertIn('Data: This is packet 1', stdout)
+        self.assertIn('Data: This is packet 2', stdout)
+        self.assertIn('Data: This is packet 3', stdout)
+        self.assertIn('Data: This is packet 4', stdout)
+        self.assertIn('Data: This is packet 5', stdout)
+        self.assertIn('Data: This is packet 6', stdout)
+        self.assertIn('Data: This is packet 7', stdout)
+        self.assertIn('Data: This is packet 8', stdout)
+        self.assertIn('Data: This is packet 9', stdout)
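
As with the PAL examples in CONTRIBUTING.md, any class or individual test above can be run on its own by pytest node id, assuming the same PYTHONPATH/PAL_LOADER exports; for instance:

```sh
# one class, then one individual test, from LibOS/shim/test/regression/
python3 -m pytest -v -rs test_libos.py::TC_30_Syscall
python3 -m pytest -v -rs test_libos.py::TC_30_Syscall::test_020_getdents
```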

+ 0 - 23
Pal/regression/00_Atomics.py

@@ -1,23 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-sgx = os.environ.get('SGX_RUN') == '1'
-
-def manifest_file(file):
-    if sgx:
-        return file + '.manifest.sgx'
-    else:
-        return file + '.manifest'
-
-# Running AtomicMath
-regression = Regression(loader, "AtomicMath")
-
-regression.add_check(name="Atomic Math",
-    check=lambda res: "Subtract INT_MIN: Both values match 2147483648" in res[0].log and \
-                     "Subtract INT_MAX: Both values match -2147483647" in res[0].log and \
-                     "Subtract LLONG_MIN: Both values match -9223372036854775808" in res[0].log and \
-                     "Subtract LLONG_MAX: Both values match -9223372036854775807" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 186
Pal/regression/00_Bootstrap.py

@@ -1,186 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-sgx = os.environ.get('SGX_RUN') == '1'
-
-def manifest_file(file):
-    if sgx:
-        return file + '.manifest.sgx'
-    else:
-        return file + '.manifest'
-
-# Running Bootstrap
-regression = Regression(loader, "Bootstrap")
-
-regression.add_check(name="Basic Bootstrapping",
-    check=lambda res: "User Program Started" in res[0].log)
-
-regression.add_check(name="Control Block: Executable Name",
-    check=lambda res: "Loaded Executable: file:Bootstrap" in res[0].log)
-
-regression.add_check(name="One Argument Given",
-    check=lambda res: "# of Arguments: 1" in res[0].log and \
-            "argv[0] = file:Bootstrap" in res[0].log)
-
-regression.add_check(name="Five Arguments Given",
-    args = ['a', 'b', 'c', 'd'],
-    check=lambda res: "# of Arguments: 5" in res[0].log and \
-           "argv[1] = a" in res[0].log and "argv[2] = b" in res[0].log and \
-           "argv[3] = c" in res[0].log and "argv[4] = d" in res[0].log)
-
-regression.add_check(name="Control Block: Debug Stream (Inline)",
-    check=lambda res: "Written to Debug Stream" in res[0].out)
-
-regression.add_check(name="Control Block: Page Size",
-    check=lambda res: ("Page Size: %d" % (mmap.PAGESIZE)) in res[0].log)
-
-regression.add_check(name="Control Block: Allocation Alignment",
-    check=lambda res: ("Allocation Alignment: %d" % (mmap.ALLOCATIONGRANULARITY)) in res[0].log)
-
-regression.add_check(name="Control Block: Executable Range",
-    check=lambda res: "Executable Range OK" in res[0].log)
-
-def check_cpu_info(res):
-    cpu_num = cpu_model = cpu_family = cpu_stepping = 0
-    cpu_vendor = cpu_brand = cpu_flags = None
-
-    f = open("/proc/cpuinfo", "r")
-    for line in f:
-        line = line.strip()
-        pos = line.find(":")
-        if pos == -1:
-            continue
-
-        key = line[:pos].strip()
-        val = line[pos+1:].strip()
-        if key == "processor":  cpu_num += 1
-        if key == "vendor_id":  cpu_vendor = val
-        if key == "cpu family": cpu_family = int(val)
-        if key == "model":      cpu_model = int(val)
-        if key == "model name": cpu_brand = val
-        if key == "stepping":   cpu_stepping = int(val)
-        if key == "flags":
-            cpu_flags = []
-            for flag in val.split(" "):
-                if flag in ["fpu", "vme", "de", "pse", "tsc", "msr", "pae",
-                        "mce", "cx8", "apic", "sep", "mtrr", "pge", "mca",
-                        "cmov", "pat", "pse36", "pn", "clflush", "dts", "acpi",
-                        "mmx", "fxsr", "sse", "sse2", "ss", "ht", "tm",
-                        "ia64", "pbe"]:
-                    cpu_flags.append(flag)
-            cpu_flags = " ".join(cpu_flags)
-
-    return ("CPU num: %d"      % cpu_num)      in res[0].log and \
-           ("CPU vendor: %s"   % cpu_vendor)   in res[0].log and \
-           ("CPU brand: %s"    % cpu_brand)    in res[0].log and \
-           ("CPU family: %d"   % cpu_family)   in res[0].log and \
-           ("CPU model: %d"    % cpu_model)    in res[0].log and \
-           ("CPU stepping: %d" % cpu_stepping) in res[0].log and \
-           ("CPU flags: %s"    % cpu_flags)    in res[0].log
-
-regression.add_check(name="Control Block: CPU Info",
-    check=check_cpu_info)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-# Running ..Bootstrap
-regression = Regression(loader, "..Bootstrap")
-
-regression.add_check(name="Dotdot handled properly",
-    check=lambda res: "User Program Started" in res[0].log)
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-
-# Running Bootstrap2
-regression = Regression(loader, manifest_file("Bootstrap2"))
-
-regression.add_check(name="Control Block: Manifest as Executable Name",
-    check=lambda res: "Loaded Manifest: file:" + manifest_file("Bootstrap2") in res[0].log
-                     and "User Program Started" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-# Running Bootstrap3
-regression = Regression(loader, "Bootstrap3")
-
-regression.add_check(name="Preload Libraries",
-    check=lambda res: "Binary 1 Preloaded" in res[0].log and
-                      "Binary 2 Preloaded" in res[0].log)
-
-regression.add_check(name="Preload Libraries Linking",
-    check=lambda res: "Preloaded Function 1 Called" in res[0].log and
-                      "Preloaded Function 2 Called" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-# Running Bootstrap4
-regression = Regression(loader, manifest_file("Bootstrap4"))
-
-regression.add_check(name="Control Block: Manifest as Argument",
-    check=lambda res: any([line.startswith("Loaded Manifest: file:" + manifest_file("Bootstrap4")) for line in res[0].log]))
-
-regression.add_check(name="Control Block: Executable as in Manifest",
-    check=lambda res: "Loaded Executable: file:Bootstrap" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-# Running Bootstrap4.manifest
-regression = Regression(executable = "./" + manifest_file("Bootstrap4"))
-
-regression.add_check(name="Control Block: Manifest as Argument (Load by Shebang)",
-    check=lambda res: "Loaded Manifest: file:" + manifest_file("Bootstrap4") in res[0].log)
-
-regression.add_check(name="Control Block: Executable as in Manifest (Load by Shebang)",
-    check=lambda res: "Loaded Executable: file:Bootstrap" in res[0].log)
-
-regression.add_check(name="Arguments: loader.execname in Manifest",
-    check=lambda res: "argv[0] = Bootstrap" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-# Running Bootstrap5.manifest
-regression = Regression(loader, manifest_file("Bootstrap5"))
-
-regression.add_check(name="Bootstrap without Executable but Preload Libraries",
-    check=lambda res: "Binary 1 Preloaded" in res[0].log and
-                      "Binary 2 Preloaded" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-# Running Bootstrap6.manifest - SGX-specific test
-if sgx:
-    regression = Regression(loader, manifest_file("Bootstrap6"), timeout = 200000)
-    regression.add_check(name="8GB Enclave Creation (SGX Only)",
-                         check=lambda res: "Loaded Manifest: file:Bootstrap6.manifest.sgx" in res[0].log and
-                         "Executable Range OK" in res[0].log)
-
-    rv = regression.run_checks()
-    if rv: sys.exit(rv)
-
-# Running Bootstrap7.manifest
-regression = Regression(loader, manifest_file("Bootstrap7"))
-
-regression.add_check(name="Load Large Number of Items in Manifest",
-    check=lambda res: "key1000=na" in res[0].log and
-                      "key1=na" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-# Running an executable that doesn't exist, should at least warn you
-regression = Regression(loader, "fakenews")
-
-regression.add_check(name="Error on missing executable and manifest",
-    check=lambda res: "Executable not found" in res[0].log and
-                     any([line.startswith("USAGE: ") for line  in res[0].log]))
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 70
Pal/regression/00_Symbols.py

@@ -1,70 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-regression = Regression(loader, "Symbols")
-
-all_symbols = [
-    'DkVirtualMemoryAlloc',
-    'DkVirtualMemoryFree',
-    'DkVirtualMemoryProtect',
-    'DkProcessCreate',
-    'DkProcessExit',
-    'DkProcessSandboxCreate',
-    'DkStreamOpen',
-    'DkStreamWaitForClient',
-    'DkStreamRead',
-    'DkStreamWrite',
-    'DkStreamDelete',
-    'DkStreamMap',
-    'DkStreamUnmap',
-    'DkStreamSetLength',
-    'DkStreamFlush',
-    'DkSendHandle',
-    'DkReceiveHandle',
-    'DkStreamAttributesQuery',
-    'DkStreamAttributesQueryByHandle',
-    'DkStreamAttributesSetByHandle',
-    'DkStreamGetName',
-    'DkStreamChangeName',
-    'DkThreadCreate',
-    'DkThreadDelayExecution',
-    'DkThreadYieldExecution',
-    'DkThreadExit',
-    'DkThreadResume',
-    'DkSetExceptionHandler',
-    'DkExceptionReturn',
-    'DkMutexCreate',
-    'DkMutexRelease',
-    'DkNotificationEventCreate',
-    'DkSynchronizationEventCreate',
-    'DkEventSet',
-    'DkEventClear',
-    'DkObjectsWaitAny',
-    'DkObjectClose',
-    'DkSystemTimeQuery',
-    'DkRandomBitsRead',
-    'DkInstructionCacheFlush',
-    'DkSegmentRegister',
-    'DkMemoryAvailableQuota',
-    'DkCreatePhysicalMemoryChannel',
-    'DkPhysicalMemoryCommit',
-    'DkPhysicalMemoryMap']
-
-def check_symbols(res):
-    for sym in all_symbols:
-        found = False
-        for line in res[0].log:
-            if line and line.startswith(sym + ' = '):
-                if line == sym + ' = 0x0':
-                    return False
-                found = True
-                break
-        if not found:
-            return False
-    return True
-
-regression.add_check(name="Symbol Resolution", check=check_symbols);
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 25
Pal/regression/01_Exception.py

@@ -1,25 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-regression = Regression(loader, "Exception")
-
-regression.add_check(name="Exception Handling (Div-by-Zero)",
-    check=lambda res: any([line.startswith("Arithmetic Exception Handler") for line in res[0].log]))
-
-regression.add_check(name="Exception Handling (Memory Fault)",
-    check=lambda res: any([line.startswith("Memory Fault Exception Handler") for line in res[0].log]))
-
-regression.add_check(name="Exception Handler Swap",
-    check=lambda res: any([line.startswith("Arithmetic Exception Handler 1") for line in res[0].log]) and
-                      any([line.startswith("Arithmetic Exception Handler 2") for line in res[0].log]))
-
-regression.add_check(name="Exception Handling (Set Context)",
-    check=lambda res: any([line.startswith("Arithmetic Exception Handler 1") for line in res[0].log]))
-
-regression.add_check(name="Exception Handling (Red zone)",
-    check=lambda res: "Red zone test ok." in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 57
Pal/regression/02_Directory.py

@@ -1,57 +0,0 @@
-import os, sys, mmap, random, string, shutil
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-def prepare_dirs(args):
-    if os.path.exists("dir_exist.tmp"):
-        shutil.rmtree("dir_exist.tmp")
-    if os.path.exists("dir_nonexist.tmp"):
-        shutil.rmtree("dir_nonexist.tmp")
-    if os.path.exists("dir_delete.tmp"):
-        shutil.rmtree("dir_delete.tmp")
-
-    global dir_files
-    os.mkdir("dir_exist.tmp")
-    dir_files = []
-    for i in range(5):
-        file = ''.join([random.choice(string.ascii_letters) for i in range(8)])
-        f = open("dir_exist.tmp/" + file, "w")
-        f.close()
-        dir_files.append(file)
-
-    os.mkdir("dir_delete.tmp")
-
-
-regression = Regression(loader, "Directory", prepare_dirs)
-
-regression.add_check(name="Basic Directory Opening",
-    check=lambda res: "Directory Open Test 1 OK" in res[0].log and
-                      "Directory Open Test 2 OK" in res[0].log and
-                      "Directory Open Test 3 OK" in res[0].log)
-
-regression.add_check(name="Basic Directory Creation",
-    check=lambda res: "Directory Creation Test 1 OK" in res[0].log and
-                      "Directory Creation Test 2 OK" in res[0].log and
-                      "Directory Creation Test 3 OK" in res[0].log)
-
-def check_read(res):
-    global dir_files
-    for file in dir_files:
-        if ("Read Directory: " + file) not in res[0].log:
-            return False
-    return True
-
-regression.add_check(name="Directory Reading", check=check_read)
-
-regression.add_check(name="Directory Attribute Query",
-    check=lambda res: "Query: type = 7" in res[0].log)
-
-regression.add_check(name="Directory Attribute Query by Handle",
-    check=lambda res: "Query by Handle: type = 7" in res[0].log)
-
-regression.add_check(name="Directory Deletion",
-    check=lambda res: not os.path.exists("dir_delete.tmp"))
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 15
Pal/regression/02_Event.py

@@ -1,15 +0,0 @@
-import os, sys
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-regression = Regression(loader, "Event")
-
-regression.add_check(name="Wait for event with too short timeout",
-    check=lambda res: "Wait with too short timeout ok." in res[0].log)
-
-regression.add_check(name="Wait for event with long enough timeout",
-    check=lambda res: "Wait with long enough timeout ok." in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 62
Pal/regression/02_File.py

@@ -1,62 +0,0 @@
-import os, sys, mmap, random, string
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-def prepare_files(args):
-    global file_exist
-    file_exist = open('File', 'rb').read()
-
-    if os.path.exists("file_nonexist.tmp"):
-        os.remove("file_nonexist.tmp")
-
-    with open("file_delete.tmp", "wb") as f:
-        f.write(file_exist)
-
-# Running File
-regression = Regression(loader, "File", prepare_files)
-
-regression.add_check(name="Basic File Opening",
-    check=lambda res: "File Open Test 1 OK" in res[0].log and
-                      "File Open Test 2 OK" in res[0].log and
-                      "File Open Test 3 OK" in res[0].log)
-
-regression.add_check(name="Basic File Creation",
-    check=lambda res: "File Creation Test 1 OK" in res[0].log and
-                      "File Creation Test 2 OK" in res[0].log and
-                      "File Creation Test 3 OK" in res[0].log)
-
-regression.add_check(name="File Reading",
-    check=lambda res: ("Read Test 1 (0th - 40th): " + file_exist[0:40].hex()) in res[0].log and
-                      ("Read Test 2 (0th - 40th): " + file_exist[0:40].hex()) in res[0].log and
-                      ("Read Test 3 (200th - 240th): " + file_exist[200:240].hex()) in res[0].log)
-
-def check_write(res):
-    global file_exist
-    with open("file_nonexist.tmp", "rb") as f:
-        file_nonexist = f.read()
-    return file_exist[0:40] == file_nonexist[200:240] and \
-           file_exist[200:240] == file_nonexist[0:40]
-
-regression.add_check(name="File Writing", check=check_write)
-
-regression.add_check(name="File Attribute Query",
-    check=lambda res: ("Query: type = 1, size = %d" % (len(file_exist))) in res[0].log)
-
-regression.add_check(name="File Attribute Query by Handle",
-    check=lambda res: ("Query by Handle: type = 1, size = %d" % (len(file_exist))) in res[0].log)
-
-regression.add_check(name="File Mapping",
-    check=lambda res: ("Map Test 1 (0th - 40th): " + file_exist[0:40].hex()) in res[0].log and
-                      ("Map Test 2 (200th - 240th): " + file_exist[200:240].hex()) in res[0].log and
-                      ("Map Test 3 (4096th - 4136th): " + file_exist[4096:4136].hex()) in res[0].log and
-                      ("Map Test 4 (4296th - 4336th): " + file_exist[4296:4336].hex()) in res[0].log)
-
-regression.add_check(name="Set File Length",
-    check=lambda res: os.stat("file_nonexist.tmp").st_size == mmap.ALLOCATIONGRANULARITY)
-
-regression.add_check(name="File Deletion",
-    check=lambda res: not os.path.exists("file_delete.tmp"))
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 38
Pal/regression/02_Memory.py

@@ -1,38 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-sgx = os.environ.get('SGX_RUN') == '1'
-
-regression = Regression(loader, "Memory")
-
-regression.add_check(name="Memory Allocation",
-    check=lambda res: "Memory Allocation OK" in res[0].log)
-
-regression.add_check(name="Memory Allocation with Address",
-    check=lambda res: "Memory Allocation with Address OK" in res[0].log)
-
-# SGX1 does not support unmapping a page or changing its permission after
-# enclave init. Therefore the memory protection and deallocation tests will
-# fail. By utilizing SGX2 it's possibile to fix this.
-
-regression.add_check(name="Memory Protection", ignore_failure = sgx,
-    check=lambda res: "Memory Allocation Protection (RW) OK" in res[0].log and
-                      "Memory Protection (R) OK" in res[0].log)
-
-regression.add_check(name="Memory Deallocation", ignore_failure = sgx,
-    check=lambda res: "Memory Deallocation OK" in res[0].log)
-
-def check_quota(res):
-    for line in res[0].log:
-        if line.startswith("Total Memory:"):
-            return line != "Total Memory: 0"
-    return False
-
-regression.add_check(name="Get Memory Total Quota", check=check_quota)
-
-regression.add_check(name="Get Memory Available Quota",
-    check=lambda res: "Get Memory Available Quota OK" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 38
Pal/regression/02_Misc.py

@@ -1,38 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-regression = Regression(loader, "Misc")
-
-regression.add_check(name="Query System Time",
-    check=lambda res: "Query System Time OK" in res[0].log)
-
-regression.add_check(name="Delay Execution for 10000 Microseconds",
-    check=lambda res: "Delay Execution for 10000 Microseconds OK" in res[0].log)
-
-regression.add_check(name="Delay Execution for 3 Seconds",
-    check=lambda res: "Delay Execution for 3 Seconds OK" in res[0].log)
-
-regression.add_check(name="Generate Random Bits",
-    check=lambda res: "Generate Random Bits OK" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-regression = Regression(loader, "Hex")
-
-regression.add_check(name="Hex 2 String Helper Function",
-                     check=lambda res: "Hex test 1 is deadbeef" in res[0].log and \
-                     "Hex test 2 is cdcdcdcdcdcdcdcd" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-regression = Regression(loader, "Exit")
-
-regression.add_check(name="Exit Code Propagation",
-    check=lambda res: 112 == res[0].code)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 24
Pal/regression/02_Pipe.py

@@ -1,24 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-regression = Regression(loader, "Pipe")
-
-regression.add_check(name="Pipe Creation",
-    check=lambda res: "Pipe Creation 1 OK" in res[0].log)
-
-regression.add_check(name="Pipe Attributes",
-    check=lambda res: "Pipe Attribute Query 1 on pipesrv returned OK" in res[0].log)
-
-regression.add_check(name="Pipe Connection",
-    check=lambda res: "Pipe Connection 1 OK" in res[0].log)
-
-regression.add_check(name="Pipe Transmission",
-    check=lambda res: "Pipe Write 1 OK" in res[0].log and
-                      "Pipe Read 1: Hello World 1" in res[0].log and
-                      "Pipe Write 2 OK" in res[0].log and
-                      "Pipe Read 2: Hello World 2" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 18
Pal/regression/02_Semaphore.py

@@ -1,18 +0,0 @@
-import os, sys, mmap, random, string
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-# Running Semaphore
-regression = Regression(loader, "Semaphore")
-
-regression.add_check(name="Semaphore: Timeout on Locked Semaphores",
-    check=lambda res: "Locked binary semaphore timed out (1000)." in res[0].log and
-                      "Locked binary semaphore timed out (0)." in res[0].log)
-
-regression.add_check(name="Semaphore: Acquire Unlocked Semaphores",
-    check=lambda res: "Locked binary semaphore successfully (-1)." in res[0].log and
-                      "Locked binary semaphore successfully (0)." in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 39
Pal/regression/02_Socket.py

@@ -1,39 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-regression = Regression(loader, "Socket")
-
-regression.add_check(name="TCP Socket Creation",
-    check=lambda res: "TCP Creation 1 OK" in res[0].log)
-
-regression.add_check(name="TCP Socket Connection",
-    check=lambda res: "TCP Connection 1 OK" in res[0].log)
-
-regression.add_check(name="TCP Socket Transmission",
-    check=lambda res: "TCP Write 1 OK" in res[0].log and
-                      "TCP Read 1: Hello World 1" in res[0].log and
-                      "TCP Write 2 OK" in res[0].log and
-                      "TCP Read 2: Hello World 2" in res[0].log)
-
-regression.add_check(name="UDP Socket Creation",
-    check=lambda res: "UDP Creation 1 OK" in res[0].log)
-
-regression.add_check(name="UDP Socket Connection",
-    check=lambda res: "UDP Connection 1 OK" in res[0].log)
-
-regression.add_check(name="UDP Socket Transmission",
-    check=lambda res: "UDP Write 1 OK" in res[0].log and
-                      "UDP Read 1: Hello World 1" in res[0].log and
-                      "UDP Write 2 OK" in res[0].log and
-                      "UDP Read 2: Hello World 2" in res[0].log)
-
-regression.add_check(name="Bound UDP Socket Transmission",
-    check=lambda res: "UDP Write 3 OK" in res[0].log and
-                      "UDP Read 3: Hello World 1" in res[0].log and
-                      "UDP Write 4 OK" in res[0].log and
-                      "UDP Read 4: Hello World 2" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 45
Pal/regression/02_Thread.py

@@ -1,45 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-sgx = os.environ.get('SGX_RUN') == '1'
-
-regression = Regression(loader, "Thread")
-
-regression.add_check(name="Thread Creation",
-    check=lambda res: "Child Thread Created" in res[0].log and
-                      "Run in Child Thread: Hello World" in res[0].log)
-
-regression.add_check(name="Multiple Threads Run in Parallel",
-    check=lambda res: "Threads Run in Parallel OK" in res[0].log)
-
-regression.add_check(name="Set Thread Private Segment Register",
-    check=lambda res: "Private Message (FS Segment) 1: Hello World 1" in res[0].log and
-                      "Private Message (FS Segment) 2: Hello World 2" in res[0].log)
-
-regression.add_check(name="Thread Exit",
-    check=lambda res: "Child Thread Exited" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-regression = Regression(loader, "Thread2")
-
-regression.add_check(name="Thread Cleanup: Exit by return.",
-    check=lambda res: "Thread 2 ok." in res[0].log)
-
-# The 2 following tests are currently broken on SGX because TCS slots are not
-# reused yet (needed because of thread limit), see issue #517.
-
-regression.add_check(name="Thread Cleanup: Exit by DkThreadExit.",
-    check=lambda res: "Thread 3 ok." in res[0].log and
-                      "Exiting thread 3 failed." not in res[0].log,
-    ignore_failure=sgx)
-
-regression.add_check(name="Thread Cleanup: Can still start threads.",
-    check=lambda res: "Thread 4 ok." in res[0].log,
-    ignore_failure=sgx)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 59
Pal/regression/03_Process.py

@@ -1,59 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-regression = Regression(loader, "Process", timeout=8000)
-
-def check_times(target, lines, times):
-    count = 0
-    for line in lines:
-        if target == line:
-            count += 1
-    return count == times
-
-regression.add_check(name="Process Creation",
-    check=lambda res: check_times("Child Process Created", res[0].log, 3))
-
-regression.add_check(name="Process Creation Arguments",
-    check=lambda res: check_times("argv[0] = Process", res[0].log, 3) and
-                      check_times("argv[1] = Child",   res[0].log, 3))
-
-regression.add_check(name="Process Channel Transmission",
-    check=lambda res: check_times("Process Write 1 OK",            res[0].log, 3) and
-                      check_times("Process Read 1: Hello World 1", res[0].log, 3) and
-                      check_times("Process Write 2 OK",            res[0].log, 3) and
-                      check_times("Process Read 2: Hello World 2", res[0].log, 3))
-
-def check_broadcast_result(res):
-    if not check_times("Warning: broadcast stream is not open. "
-                       "Do you have a multicast route configured?",
-                       res[0].log, 0):
-        print("Could not open broadcast stream. Dou you have a multicast route configured?")
-
-    return (check_times("Broadcast Write OK",            res[0].log, 1) and
-            check_times("Broadcast Read: Hello World 1", res[0].log, 3))
-
-regression.add_check(name="Multi-Process Broadcast Channel Transmission",
-                     check=check_broadcast_result)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-regression = Regression(loader, "Process2")
-
-regression.add_check(name="Process Creation with a Different Binary",
-    check=lambda res: check_times("User Program Started", res[0].log, 1))
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-
-regression = Regression(loader, "Process3")
-
-regression.add_check(name="Process Creation without Executable",
-    check=lambda res: check_times("Binary 1 Preloaded", res[0].log, 2) and
-                      check_times("Binary 2 Preloaded", res[0].log, 2))
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)
-

+ 0 - 68
Pal/regression/04_Ipc.py

@@ -1,68 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-sgx = os.environ.get('SGX_RUN') == '1'
-
-if sgx:
-    print("Bulk IPC not supported on SGX")
-    exit(0)
-
-## XXX Should really be running these tests as part of CI
-if not os.path.exists('/dev/gipc'):
-    print("GIPC not loaded; skipping these tests\n")
-    exit(0)
-
-def prepare_files(args):
-    with open("ipc_mapping.tmp", "w") as f:
-        f.write("Hello World")
-        os.ftruncate(f.fileno(), mmap.PAGESIZE)
-
-regression = Regression(loader, "Ipc", prepare_files)
-
-def check_times(target, lines, times):
-    count = 0
-    for line in lines:
-        if target == line:
-            count += 1
-    return count == times
-
-regression.add_check(name="Create and Join Physical Memory Bulk Copy Store",
-    check=lambda res: check_times("Create Physical Memory Store OK", res[0].log, 5) and
-                      check_times("Join Physical Memory Store OK",   res[0].log, 5))
-
-regression.add_check(name="Map and Commit Anonymous Physical Memory",
-    check=lambda res: "[Test 1] Physical Memory Commit OK" in res[0].log and
-                      "[Test 1] Physical Memory Map   : Hello World" in res[0].log)
-
-regression.add_check(name="Transfer Anonymous Physical Memory as Copy-on-Write",
-    check=lambda res: "[Test 1] Sender   After  Commit: Hello World, Alice" in res[0].log and
-                      "[Test 1] Sender   Before Map   : Alice, Hello World" in res[0].log and
-                      "[Test 1] Receiver After  Map   : Hello World, Bob"   in res[0].log and
-                      "[Test 1] Sender   After  Map   : Alice, Hello World" in res[0].log)
-
-regression.add_check(name="Map and Commit Untouched Physical Memory",
-    check=lambda res: "[Test 2] Physical Memory Commit OK" in res[0].log and
-                      "[Test 2] Physical Memory Map   : "                   in res[0].log and
-                      "[Test 2] Sender   After  Commit: Hello World, Alice" in res[0].log and
-                      "[Test 2] Sender   Before Map   : Alice, Hello World" in res[0].log and
-                      "[Test 2] Receiver After  Map   : Hello World, Bob"   in res[0].log and
-                      "[Test 2] Sender   After  Map   : Alice, Hello World" in res[0].log)
-
-regression.add_check(name="Map and Commit File-Backed Physical Memory",
-    check=lambda res: "[Test 3] Physical Memory Commit OK" in res[0].log and
-                      "[Test 3] Physical Memory Map   : Hello World"        in res[0].log and
-                      "[Test 3] Sender   After  Commit: Hello World"        in res[0].log and
-                      "[Test 3] Receiver After  Map   : Hello World, Bob"   in res[0].log and
-                      "[Test 3] Sender   After  Map   : Hello World"        in res[0].log)
-
-regression.add_check(name="Map and Commit File-Backed Physical Memory Beyond File Size",
-    check=lambda res: "[Test 4] Physical Memory Commit OK" in res[0].log and
-                      "[Test 4] Physical Memory Map   : Memory Fault" in res[0].log)
-
-regression.add_check(name="Map and Commit Huge Physical Memory",
-    check=lambda res: "[Test 5] Physical Memory Commit OK" in res[0].log and
-                      "[Test 5] Physical Memory Map   : Hello World" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 29
Pal/regression/04_SendHandle.py

@@ -1,29 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-regression = Regression(loader, "SendHandle")
-
-def check_times(target, lines, times):
-    count = 0
-    for line in lines:
-        if target == line:
-            count += 1
-    return count == times
-
-regression.add_check(name="Send and Receive Handles across Processes",
-    check=lambda res: check_times("Send Handle OK", res[0].log, 3) and
-                      check_times("Receive Handle OK", res[0].log, 3))
-
-regression.add_check(name="Send Pipe Handle",
-        check=lambda res: check_times("Receive Pipe Handle: Hello World", res[0].log, 1))
-
-regression.add_check(name="Send Socket Handle",
-        check=lambda res: check_times("Receive Socket Handle: Hello World", res[0].log, 1))
-
-regression.add_check(name="Send File Handle",
-        check=lambda res: check_times("Receive File Handle: Hello World", res[0].log, 1))
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 0 - 50
Pal/regression/05_Process.py

@@ -1,50 +0,0 @@
-## This test is specifically for the reference monitor code, not process creation in general.
-## It is not well-tested right now, but keep the tests around for future work.
-
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_SEC']
-
-if not os.path.exists(loader):
-    print("Reference monitor mode is not available on this platform")
-    exit(0)
-
-regression = Regression(loader, "Process")
-
-def check_times(target, lines, times):
-    count = 0
-    for line in lines:
-        if target == line:
-            count += 1
-    return count == times
-
-regression.add_check(name="Process Creation",
-    check=lambda res: check_times("Child Process Created", res[0].log, 3))
-
-regression.add_check(name="Process Creation Arguments",
-    check=lambda res: check_times("argv[0] = Process", res[0].log, 3) and
-                      check_times("argv[1] = Child",   res[0].log, 3))
-
-regression.add_check(name="Process Channel Transmission",
-    check=lambda res: check_times("Process Write 1 OK",            res[0].log, 3) and
-                      check_times("Process Read 1: Hello World 1", res[0].log, 3) and
-                      check_times("Process Write 2 OK",            res[0].log, 3) and
-                      check_times("Process Read 2: Hello World 2", res[0].log, 3))
-
-regression.add_check(name="Multi-Process Broadcast Channel Transmission",
-    check=lambda res: check_times("Broadcast Write OK",            res[0].log, 1) and
-                      check_times("Broadcast Read: Hello World 1", res[0].log, 3))
-
-rv = regression.run_checks()
-## dp : For now, let these tests fail.  We should fix this.
-#if rv: sys.exit(rv)
-
-regression = Regression(loader, "Process2")
-
-regression.add_check(name="Process Creation without Executable",
-    check=lambda res: check_times("Binary 1 Preloaded", res[0].log, 2) and
-                      check_times("Binary 2 Preloaded", res[0].log, 2))
-
-rv = regression.run_checks()
-#if rv: sys.exit(rv)

+ 0 - 61
Pal/regression/05_Reference_Monitor.py

@@ -1,61 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_SEC']
-
-if not os.path.exists(loader):
-    print("Reference monitor mode is not available on this platform")
-    exit(0)
-
-# Running Bootstrap
-regression = Regression(loader, "Bootstrap")
-
-regression.add_check(name="Basic Bootstrapping",
-    check=lambda res: "User Program Started" in res[0].log)
-
-regression.add_check(name="Control Block: Executable Name",
-    check=lambda res: "Loaded Executable: file:Bootstrap" in res[0].log)
-
-regression.add_check(name="Control Block: Default Manifest",
-    check=lambda res: "Loaded Manifest: file:manifest" in res[0].log)
-
-regression.add_check(name="One Argument Given",
-    check=lambda res: "# of Arguments: 1" in res[0].log and \
-            "argv[0] = file:Bootstrap" in res[0].log)
-
-regression.add_check(name="Five Arguments Given",
-    args = ['a', 'b', 'c', 'd'],
-    check=lambda res: "# of Arguments: 5" in res[0].log and \
-           "argv[0] = file:Bootstrap" in res[0].log and \
-           "argv[1] = a" in res[0].log and "argv[2] = b" in res[0].log and \
-           "argv[3] = c" in res[0].log and "argv[4] = d" in res[0].log)
-
-regression.add_check(name="Control Block: Debug Stream (Inline)",
-    check=lambda res: "Written to Debug Stream" in res[0].out)
-
-regression.add_check(name="Control Block: Page Size",
-    check=lambda res: ("Page Size: %d" % (mmap.PAGESIZE)) in res[0].log)
-
-regression.add_check(name="Control Block: Allocation Alignment",
-    check=lambda res: ("Allocation Alignment: %d" % (mmap.ALLOCATIONGRANULARITY)) in res[0].log)
-
-regression.add_check(name="Control Block: Executable Range",
-    check=lambda res: "Executable Range OK" in res[0].log)
-
-rv = regression.run_checks()
-## dp: For now, let the ref monitor checks fail; we should fix this
-#if rv: sys.exit(rv)
-
-# Running Bootstrap3
-regression = Regression(loader, "Bootstrap3")
-
-regression.add_check(name="Preload Libraries",
-    check=lambda res: "Binary 1 Preloaded" in res[0].log and
-                      "Binary 2 Preloaded" in res[0].log)
-
-regression.add_check(name="Preload Libraries Linking",
-    check=lambda res: "Preloaded Function 1 Called" in res[0].log and
-                      "Preloaded Function 2 Called" in res[0].log)
-
-rv = regression.run_checks()
-#if rv: sys.exit(rv)

+ 0 - 23
Pal/regression/06_AvxDisable.py

@@ -1,23 +0,0 @@
-import os, sys, mmap
-from regression import Regression
-
-loader = os.environ['PAL_LOADER']
-
-sgx = os.environ.get('SGX_RUN') == '1'
-
-def manifest_file(file):
-    if sgx:
-        return file + '.manifest.sgx'
-    else:
-        return file + '.manifest'
-
-if not sgx:
-  sys.exit(0)
-# Running AvxDisable
-regression = Regression(loader, "AvxDisable")
-
-regression.add_check(name="Disable AVX bit in XFRM",
-    check=lambda res: "Illegal instruction executed in enclave" in res[0].log)
-
-rv = regression.run_checks()
-if rv: sys.exit(rv)

+ 7 - 18
Pal/regression/Makefile

@@ -75,26 +75,15 @@ else
 $(preloads) $(executables):
 endif
 
-PYTHONENV = "PYTHONPATH=../../Scripts"
+export PYTHONPATH=../../Scripts
 
-ifeq ($(SGX_RUN),1)
-	PYTHONENV += "TIMEOUT=20000"
-endif
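+# always regenerate the JUnit-XML report, even if a stale one exists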
+.PHONY: regression
+regression:
+	$(RM) pal-regression.xml
+	$(MAKE) pal-regression.xml
 
-regression: $(call expand_target,$(target))
-	@printf "\n\nBasic Bootstrapping:\n"
-	@for f in $(wildcard 00_*.py); do env $(PYTHONENV) python3 $$f || exit $$?; done
-	@printf "\n\nException Handling:\n"
-	@for f in $(wildcard 01_*.py); do env $(PYTHONENV) python3 $$f || exit $$?; done
-	@printf "\n\nSingle-Process Functionalities:\n"
-	@for f in $(wildcard 02_*.py); do env $(PYTHONENV) python3 $$f || exit $$?; done
-	@printf "\n\nProcess Creation:\n"
-	@for f in $(wildcard 03_*.py); do env $(PYTHONENV) python3 $$f || exit $$?; done
-	@printf "\n\nMulti-Process Functionalities:\n"
-	@for f in $(wildcard 04_*.py); do env $(PYTHONENV) python3 $$f || exit $$?; done
-	@printf "\n\nReference Monitor (Optional):\n"
-	@for f in $(wildcard 05_*.py); do env $(PYTHONENV) python3 $$f || exit $$?; done
-	@printf "\n\n"
+pal-regression.xml: test_pal.py $(call expand_target,$(target))
+	python3 -m pytest --junit-xml $@ -v test_pal.py
 
 .PHONY: clean
 clean:

+ 625 - 0
Pal/regression/test_pal.py

@@ -0,0 +1,625 @@
+#!/usr/bin/env python3
+
+import ast
+import collections
+import mmap
+import os
+import pathlib
+import random
+import shutil
+import string
+import subprocess
+import unittest
+
+from regression import (
+    HAS_SGX,
+    RegressionTestCase,
+    SandboxTestCase,
+    expectedFailureIf,
+)
+
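+# flags outside this whitelist are ignored when comparing the host's
+# /proc/cpuinfo against the flags reported by the PAL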
+CPUINFO_FLAGS_WHITELIST = [
+    'fpu', 'vme', 'de', 'pse', 'tsc', 'msr', 'pae', 'mce', 'cx8', 'apic', 'sep',
+    'mtrr', 'pge', 'mca', 'cmov', 'pat', 'pse36', 'pn', 'clflush', 'dts',
+    'acpi', 'mmx', 'fxsr', 'sse', 'sse2', 'ss', 'ht', 'tm', 'ia64', 'pbe',
+]
+
+
+class TC_00_AtomicMath(RegressionTestCase):
+    def test_000_atomic_math(self):
+        stdout, stderr = self.run_binary(['AtomicMath'])
+        self.assertIn('Subtract INT_MIN: Both values match 2147483648', stderr)
+        self.assertIn('Subtract INT_MAX: Both values match -2147483647', stderr)
+        self.assertIn('Subtract LLONG_MIN: Both values match -9223372036854775808', stderr)
+        self.assertIn('Subtract LLONG_MAX: Both values match -9223372036854775807', stderr)
+
+class TC_01_Bootstrap(RegressionTestCase):
+    def test_100_basic_boostrapping(self):
+        stdout, stderr = self.run_binary(['Bootstrap'])
+
+        # Basic Bootstrapping
+        self.assertIn('User Program Started', stderr)
+
+        # Control Block: Executable Name
+        self.assertIn('Loaded Executable: file:Bootstrap', stderr)
+
+        # One Argument Given
+        self.assertIn('# of Arguments: 1', stderr)
+        self.assertIn('argv[0] = file:Bootstrap', stderr)
+
+        # Control Block: Debug Stream (Inline)
+        self.assertIn('Written to Debug Stream', stdout)
+
+        # Control Block: Page Size
+        self.assertIn('Page Size: {}'.format(mmap.PAGESIZE), stderr)
+
+        # Control Block: Allocation Alignment
+        self.assertIn('Allocation Alignment: {}'.format(mmap.ALLOCATIONGRANULARITY), stderr)
+
+        # Control Block: Executable Range
+        self.assertIn('Executable Range OK', stderr)
+
+    def test_101_basic_boostrapping_five_arguments(self):
+        stdout, stderr = self.run_binary(['Bootstrap', 'a', 'b', 'c', 'd'])
+
+        # Five Arguments Given
+        self.assertIn('# of Arguments: 5', stderr)
+        self.assertIn('argv[1] = a', stderr)
+        self.assertIn('argv[2] = b', stderr)
+        self.assertIn('argv[3] = c', stderr)
+        self.assertIn('argv[4] = d', stderr)
+
+    def test_102_cpuinfo(self):
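+        # parse the last processor entry of the host's /proc/cpuinfo into a
+        # dict, keeping only the whitelisted flags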
+        with open('/proc/cpuinfo') as file:
+            cpuinfo = file.read().strip().split('\n\n')[-1]
+        cpuinfo = dict(map(str.strip, line.split(':'))
+            for line in cpuinfo.split('\n'))
+        if 'flags' in cpuinfo:
+            cpuinfo['flags'] = ' '.join(flag for flag in cpuinfo['flags'].split()
+                if flag in CPUINFO_FLAGS_WHITELIST)
+
+        stdout, stderr = self.run_binary(['Bootstrap'])
+
+        self.assertIn('CPU num: {}'.format(int(cpuinfo['processor']) + 1),
+            stderr)
+        self.assertIn('CPU vendor: {[vendor_id]}'.format(cpuinfo), stderr)
+        self.assertIn('CPU brand: {[model name]}'.format(cpuinfo), stderr)
+        self.assertIn('CPU family: {[cpu family]}'.format(cpuinfo), stderr)
+        self.assertIn('CPU model: {[model]}'.format(cpuinfo), stderr)
+        self.assertIn('CPU stepping: {[stepping]}'.format(cpuinfo), stderr)
+        self.assertIn('CPU flags: {[flags]}'.format(cpuinfo), stderr)
+
+    def test_103_dotdot(self):
+        stdout, stderr = self.run_binary(['..Bootstrap'])
+        self.assertIn('User Program Started', stderr)
+
+    def test_104_manifest_as_executable_name(self):
+        manifest = self.get_manifest('Bootstrap2')
+        stdout, stderr = self.run_binary([manifest])
+        self.assertIn('User Program Started', stderr)
+        self.assertIn('Loaded Manifest: file:' + manifest, stderr)
+
+    def test_105_manifest_as_argument(self):
+        manifest = self.get_manifest('Bootstrap4')
+        stdout, stderr = self.run_binary([manifest])
+        self.assertIn('Loaded Manifest: file:' + manifest, stderr)
+        self.assertIn('Loaded Executable: file:Bootstrap', stderr)
+
+    def test_106_manifest_with_shebang(self):
+        manifest = self.get_manifest('Bootstrap4')
+        stdout, stderr = self.run_binary(['./' + manifest])
+        self.assertIn('Loaded Manifest: file:' + manifest, stderr)
+        self.assertIn('Loaded Executable: file:Bootstrap', stderr)
+        self.assertIn('argv[0] = Bootstrap', stderr)
+
+    def test_110_preload_libraries(self):
+        stdout, stderr = self.run_binary(['Bootstrap3'])
+        self.assertIn('Binary 1 Preloaded', stderr)
+        self.assertIn('Binary 2 Preloaded', stderr)
+        self.assertIn('Preloaded Function 1 Called', stderr)
+        self.assertIn('Preloaded Function 2 Called', stderr)
+
+    def test_111_preload_libraries(self):
+        # Bootstrap without Executable but Preload Libraries
+        stdout, stderr = self.run_binary([self.get_manifest('Bootstrap5')])
+        self.assertIn('Binary 1 Preloaded', stderr)
+        self.assertIn('Binary 2 Preloaded', stderr)
+
+    @unittest.skipUnless(HAS_SGX, 'this test requires SGX')
+    def test_120_8gb_enclave(self):
+        manifest = self.get_manifest('Bootstrap6')
+        stdout, stderr = self.run_binary([manifest], timeout=240)
+        self.assertIn('Loaded Manifest: file:' + manifest, stderr)
+        self.assertIn('Executable Range OK', stderr)
+
+    def test_130_large_number_of_items_in_manifest(self):
+        stdout, stderr = self.run_binary([self.get_manifest('Bootstrap7')])
+        self.assertIn('key1000=na', stderr)
+        self.assertIn('key1=na', stderr)
+
+    def test_140_missing_executable_and_manifest(self):
+        try:
+            stdout, stderr = self.run_binary(['fakenews'])
+            self.fail(
+                'expected non-zero returncode, stderr: {!r}'.format(stderr))
+        except subprocess.CalledProcessError as e:
+            self.assertIn('USAGE: ', e.stderr.decode())
+
+class TC_02_Symbols(RegressionTestCase):
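+    # every Dk* entry point the PAL is expected to resolve to a non-zero address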
+    ALL_SYMBOLS = [
+        'DkVirtualMemoryAlloc',
+        'DkVirtualMemoryFree',
+        'DkVirtualMemoryProtect',
+        'DkProcessCreate',
+        'DkProcessExit',
+        'DkProcessSandboxCreate',
+        'DkStreamOpen',
+        'DkStreamWaitForClient',
+        'DkStreamRead',
+        'DkStreamWrite',
+        'DkStreamDelete',
+        'DkStreamMap',
+        'DkStreamUnmap',
+        'DkStreamSetLength',
+        'DkStreamFlush',
+        'DkSendHandle',
+        'DkReceiveHandle',
+        'DkStreamAttributesQuery',
+        'DkStreamAttributesQueryByHandle',
+        'DkStreamAttributesSetByHandle',
+        'DkStreamGetName',
+        'DkStreamChangeName',
+        'DkThreadCreate',
+        'DkThreadDelayExecution',
+        'DkThreadYieldExecution',
+        'DkThreadExit',
+        'DkThreadResume',
+        'DkSetExceptionHandler',
+        'DkExceptionReturn',
+        'DkMutexCreate',
+        'DkMutexRelease',
+        'DkNotificationEventCreate',
+        'DkSynchronizationEventCreate',
+        'DkEventSet',
+        'DkEventClear',
+        'DkObjectsWaitAny',
+        'DkObjectClose',
+        'DkSystemTimeQuery',
+        'DkRandomBitsRead',
+        'DkInstructionCacheFlush',
+        'DkSegmentRegister',
+        'DkMemoryAvailableQuota',
+        'DkCreatePhysicalMemoryChannel',
+        'DkPhysicalMemoryCommit',
+        'DkPhysicalMemoryMap',
+    ]
+
+    def test_000_symbols(self):
+        stdout, stderr = self.run_binary(['Symbols'])
+        found_symbols = dict(line.split(' = ')
+            for line in stderr.strip().split('\n'))
+        self.assertCountEqual(found_symbols, self.ALL_SYMBOLS)
+        for k, v in found_symbols.items():
+            v = ast.literal_eval(v)
+            self.assertNotEqual(v, 0, 'symbol {} has value 0'.format(k))
+
+class TC_10_Exception(RegressionTestCase):
+    def test_000_exception(self):
+        stdout, stderr = self.run_binary(['Exception'])
+
+        # Exception Handling (Div-by-Zero)
+        self.assertIn('Arithmetic Exception Handler', stderr)
+
+        # Exception Handling (Memory Fault)
+        self.assertIn('Memory Fault Exception Handler', stderr)
+
+        # Exception Handler Swap
+        self.assertIn('Arithmetic Exception Handler 1', stderr)
+        self.assertIn('Arithmetic Exception Handler 2', stderr)
+
+        # Exception Handling (Set Context)
+        self.assertIn('Arithmetic Exception Handler 1', stderr)
+
+        # Exception Handling (Red zone)
+        self.assertIn('Red zone test ok.', stderr)
+
+class TC_20_SingleProcess(RegressionTestCase):
+    def test_000_exit_code(self):
+        with self.expect_returncode(112):
+            self.run_binary(['Exit'])
+
+    def test_100_file(self):
+        try:
+            pathlib.Path('file_nonexist.tmp').unlink()
+        except FileNotFoundError:
+            pass
+        pathlib.Path('file_delete.tmp').touch()
+
+        with open('File', 'rb') as file:
+            file_exist = file.read()
+
+        stdout, stderr = self.run_binary(['File'])
+
+        # Basic File Opening
+        self.assertIn('File Open Test 1 OK', stderr)
+        self.assertIn('File Open Test 2 OK', stderr)
+        self.assertIn('File Open Test 3 OK', stderr)
+
+        # Basic File Creation
+        self.assertIn('File Creation Test 1 OK', stderr)
+        self.assertIn('File Creation Test 2 OK', stderr)
+        self.assertIn('File Creation Test 3 OK', stderr)
+
+        # File Reading
+        self.assertIn('Read Test 1 (0th - 40th): {}'.format(
+            file_exist[0:40].hex()), stderr)
+        self.assertIn('Read Test 2 (0th - 40th): {}'.format(
+            file_exist[0:40].hex()), stderr)
+        self.assertIn('Read Test 3 (200th - 240th): {}'.format(
+            file_exist[200:240].hex()), stderr)
+
+        # File Writing
+        with open('file_nonexist.tmp', 'rb') as file:
+            file_nonexist = file.read()
+
+        self.assertEqual(file_exist[0:40], file_nonexist[200:240])
+        self.assertEqual(file_exist[200:240], file_nonexist[0:40])
+
+        # File Attribute Query
+        self.assertIn(
+            'Query: type = 1, size = {}'.format(len(file_exist)), stderr)
+
+        # File Attribute Query by Handle
+        self.assertIn(
+            'Query by Handle: type = 1, size = {}'.format(len(file_exist)),
+            stderr)
+
+        # File Mapping
+        self.assertIn(
+            'Map Test 1 (0th - 40th): {}'.format(file_exist[0:40].hex()),
+            stderr)
+        self.assertIn(
+            'Map Test 2 (200th - 240th): {}'.format(file_exist[200:240].hex()),
+            stderr)
+        self.assertIn(
+            'Map Test 3 (4096th - 4136th): {}'.format(file_exist[4096:4136].hex()),
+            stderr)
+        self.assertIn(
+            'Map Test 4 (4296th - 4336th): {}'.format(file_exist[4296:4336].hex()),
+            stderr)
+
+        # Set File Length
+        self.assertEqual(
+            pathlib.Path('file_nonexist.tmp').stat().st_size,
+            mmap.ALLOCATIONGRANULARITY)
+
+        # File Deletion
+        self.assertFalse(pathlib.Path('file_delete.tmp').exists())
+
+    def test_110_directory(self):
+        for path in ['dir_exist.tmp', 'dir_nonexist.tmp', 'dir_delete.tmp']:
+            try:
+                shutil.rmtree(path)
+            except FileNotFoundError:
+                pass
+
+        path = pathlib.Path('dir_exist.tmp')
+        files = [path / ''.join(random.choice(string.ascii_letters)
+                for j in range(8))
+            for i in range(5)]
+
+        path.mkdir()
+        for p in files:
+            p.touch()
+        pathlib.Path('dir_delete.tmp').mkdir()
+
+        stdout, stderr = self.run_binary(['Directory'])
+
+        # Basic Directory Opening
+        self.assertIn('Directory Open Test 1 OK', stderr)
+        self.assertIn('Directory Open Test 2 OK', stderr)
+        self.assertIn('Directory Open Test 3 OK', stderr)
+
+        # Basic Directory Creation
+        self.assertIn('Directory Creation Test 1 OK', stderr)
+        self.assertIn('Directory Creation Test 2 OK', stderr)
+        self.assertIn('Directory Creation Test 3 OK', stderr)
+
+        # Directory Reading
+        for p in files:
+            self.assertIn('Read Directory: {}'.format(p.name), stderr)
+
+        # Directory Attribute Query
+        self.assertIn('Query: type = 7', stderr)
+
+        # Directory Attribute Query by Handle
+        self.assertIn('Query by Handle: type = 7', stderr)
+
+        # Directory Deletion
+        self.assertFalse(pathlib.Path('dir_delete.tmp').exists())
+
+    def test_200_event(self):
+        stdout, stderr = self.run_binary(['Event'])
+        self.assertIn('Wait with too short timeout ok.', stderr)
+        self.assertIn('Wait with long enough timeout ok.', stderr)
+
+    def test_210_semaphore(self):
+        stdout, stderr = self.run_binary(['Semaphore'])
+
+        # Semaphore: Timeout on Locked Semaphores
+        self.assertIn('Locked binary semaphore timed out (1000).', stderr)
+        self.assertIn('Locked binary semaphore timed out (0).', stderr)
+
+        # Semaphore: Acquire Unlocked Semaphores
+        self.assertIn('Locked binary semaphore successfully (-1).', stderr)
+        self.assertIn('Locked binary semaphore successfully (0).', stderr)
+
+    def test_300_memory(self):
+        stdout, stderr = self.run_binary(['Memory'])
+
+        # Memory Allocation
+        self.assertIn('Memory Allocation OK', stderr)
+
+        # Memory Allocation with Address
+        self.assertIn('Memory Allocation with Address OK', stderr)
+
+        # Get Memory Total Quota
+        self.assertIn('Total Memory:', stderr)
+        for line in stderr.split('\n'):
+            if line.startswith('Total Memory:'):
+                self.assertNotEqual(line, 'Total Memory: 0')
+
+        # Get Memory Available Quota
+        self.assertIn('Get Memory Available Quota OK', stderr)
+
+    @expectedFailureIf(HAS_SGX)
+    def test_301_memory_nosgx(self):
+        stdout, stderr = self.run_binary(['Memory'])
+
+        # SGX1 does not support unmapping a page or changing its permission
+        # after enclave init. Therefore the memory protection and deallocation
+        # tests will fail. By utilizing SGX2 it's possible to fix this.
+
+        # Memory Protection
+        self.assertIn('Memory Allocation Protection (RW) OK', stderr)
+        self.assertIn('Memory Protection (R) OK', stderr)
+
+        # Memory Deallocation
+        self.assertIn('Memory Deallocation OK', stderr)
+
+    def test_400_pipe(self):
+        stdout, stderr = self.run_binary(['Pipe'])
+
+        # Pipe Creation
+        self.assertIn('Pipe Creation 1 OK', stderr)
+
+        # Pipe Attributes
+        self.assertIn('Pipe Attribute Query 1 on pipesrv returned OK', stderr)
+
+        # Pipe Connection
+        self.assertIn('Pipe Connection 1 OK', stderr)
+
+        # Pipe Transmission
+        self.assertIn('Pipe Write 1 OK', stderr)
+        self.assertIn('Pipe Read 1: Hello World 1', stderr)
+        self.assertIn('Pipe Write 2 OK', stderr)
+        self.assertIn('Pipe Read 2: Hello World 2', stderr)
+
+    def test_410_socket(self):
+        stdout, stderr = self.run_binary(['Socket'])
+
+        # TCP Socket Creation
+        self.assertIn('TCP Creation 1 OK', stderr)
+
+        # TCP Socket Connection
+        self.assertIn('TCP Connection 1 OK', stderr)
+
+        # TCP Socket Transmission
+        self.assertIn('TCP Write 1 OK', stderr)
+        self.assertIn('TCP Read 1: Hello World 1', stderr)
+        self.assertIn('TCP Write 2 OK', stderr)
+        self.assertIn('TCP Read 2: Hello World 2', stderr)
+
+        # UDP Socket Creation
+        self.assertIn('UDP Creation 1 OK', stderr)
+
+        # UDP Socket Connection
+        self.assertIn('UDP Connection 1 OK', stderr)
+
+        # UDP Socket Transmission
+        self.assertIn('UDP Write 1 OK', stderr)
+        self.assertIn('UDP Read 1: Hello World 1', stderr)
+        self.assertIn('UDP Write 2 OK', stderr)
+        self.assertIn('UDP Read 2: Hello World 2', stderr)
+
+        # Bound UDP Socket Transmission
+        self.assertIn('UDP Write 3 OK', stderr)
+        self.assertIn('UDP Read 3: Hello World 1', stderr)
+        self.assertIn('UDP Write 4 OK', stderr)
+        self.assertIn('UDP Read 4: Hello World 2', stderr)
+
+    def test_500_thread(self):
+        stdout, stderr = self.run_binary(['Thread'])
+
+        # Thread Creation
+        self.assertIn('Child Thread Created', stderr)
+        self.assertIn('Run in Child Thread: Hello World', stderr)
+
+        # Multiple Threads Run in Parallel
+        self.assertIn('Threads Run in Parallel OK', stderr)
+
+        # Set Thread Private Segment Register
+        self.assertIn('Private Message (FS Segment) 1: Hello World 1', stderr)
+        self.assertIn('Private Message (FS Segment) 2: Hello World 2', stderr)
+
+        # Thread Exit
+        self.assertIn('Child Thread Exited', stderr)
+
+    def test_510_thread2(self):
+        stdout, stderr = self.run_binary(['Thread2'])
+
+        # Thread Cleanup: Exit by return.
+        self.assertIn('Thread 2 ok.', stderr)
+
+    @expectedFailureIf(HAS_SGX)
+    def test_511_thread2_nosgx(self):
+        stdout, stderr = self.run_binary(['Thread2'])
+
+        # The two following checks are currently broken on SGX because TCS
+        # slots are not reused yet (needed because of thread limit), see issue #517.
+
+        # Thread Cleanup: Exit by DkThreadExit.
+        self.assertIn('Thread 3 ok.', stderr)
+        self.assertNotIn('Exiting thread 3 failed.', stderr)
+
+        # Thread Cleanup: Can still start threads.
+        self.assertIn('Thread 4 ok.', stderr)
+
+    def test_900_misc(self):
+        stdout, stderr = self.run_binary(['Misc'])
+        # Query System Time
+        self.assertIn('Query System Time OK', stderr)
+
+        # Delay Execution for 10000 Microseconds
+        self.assertIn('Delay Execution for 10000 Microseconds OK', stderr)
+
+        # Delay Execution for 3 Seconds
+        self.assertIn('Delay Execution for 3 Seconds OK', stderr)
+
+        # Generate Random Bits
+        self.assertIn('Generate Random Bits OK', stderr)
+
+    def test_910_hex(self):
+        stdout, stderr = self.run_binary(['Hex'])
+        # Hex 2 String Helper Function
+        self.assertIn('Hex test 1 is deadbeef', stderr)
+        self.assertIn('Hex test 2 is cdcdcdcdcdcdcdcd', stderr)
+
+class TC_21_ProcessCreation(RegressionTestCase):
+    def test_100_process(self):
+        stdout, stderr = self.run_binary(['Process'], timeout=8)
+        counter = collections.Counter(stderr.split('\n'))
+        # Process Creation
+        self.assertEqual(counter['Child Process Created'], 3)
+
+        # Process Creation Arguments
+        self.assertEqual(counter['argv[0] = Process'], 3)
+        self.assertEqual(counter['argv[1] = Child'], 3)
+
+        # Process Channel Transmission
+        self.assertEqual(counter['Process Write 1 OK'], 3)
+        self.assertEqual(counter['Process Read 1: Hello World 1'], 3)
+        self.assertEqual(counter['Process Write 2 OK'], 3)
+        self.assertEqual(counter['Process Read 2: Hello World 2'], 3)
+
+    def test_110_process_broadcast(self):
+        stdout, stderr = self.run_binary(['Process'], timeout=8)
+        counter = collections.Counter(stderr.split('\n'))
+
+        # Multi-Process Broadcast Channel Transmission
+        if ('Warning: broadcast stream is not open. '
+                'Do you have a multicast route configured?') in stderr:
+            self.skipTest('Could not open broadcast stream. '
+                'Do you have a multicast route configured?')
+
+        self.assertEqual(counter['Broadcast Write OK'], 1)
+        self.assertEqual(counter['Broadcast Read: Hello World 1'], 3)
+
+    def test_200_process2(self):
+        # Process Creation with a Different Binary
+        stdout, stderr = self.run_binary(['Process2'])
+        counter = collections.Counter(stderr.split('\n'))
+        self.assertEqual(counter['User Program Started'], 1)
+
+    def test_300_process3(self):
+        # Process Creation without Executable
+        stdout, stderr = self.run_binary(['Process3'])
+        counter = collections.Counter(stderr.split('\n'))
+        self.assertEqual(counter['Binary 1 Preloaded'], 2)
+        self.assertEqual(counter['Binary 2 Preloaded'], 2)
+
+
+## XXX Should really be running these tests as part of CI
+@unittest.skipIf(HAS_SGX, 'GIPC not supported on SGX')
+@unittest.skipUnless(pathlib.Path('/dev/gipc').exists(), 'GIPC not loaded')
+class TC_22_GIPC(RegressionTestCase):
+    def test_000_gipc(self):
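+        # recreate the page-sized file mapped by the file-backed tests
+        # (this replaces the old prepare_files() hook)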
+        with open('ipc_mapping.tmp', 'w') as file:
+            file.write('Hello World')
+            os.ftruncate(file.fileno(), mmap.PAGESIZE)
+
+        stdout, stderr = self.run_binary(['Ipc'])
+        counter = collections.Counter(stderr.split('\n'))
+
+        # Create and Join Physical Memory Bulk Copy Store
+        self.assertEqual(counter['Create Physical Memory Store OK'], 5)
+        self.assertEqual(counter['Join Physical Memory Store OK'], 5)
+
+        # Map and Commit Anonymous Physical Memory
+        self.assertIn('[Test 1] Physical Memory Commit OK', stderr)
+        self.assertIn('[Test 1] Physical Memory Map   : Hello World', stderr)
+
+        # Transfer Anonymous Physical Memory as Copy-on-Write
+        self.assertIn('[Test 1] Sender   After  Commit: Hello World, Alice', stderr)
+        self.assertIn('[Test 1] Sender   Before Map   : Alice, Hello World', stderr)
+        self.assertIn('[Test 1] Receiver After  Map   : Hello World, Bob', stderr)
+        self.assertIn('[Test 1] Sender   After  Map   : Alice, Hello World', stderr)
+
+        # Map and Commit Untouched Physical Memory
+        self.assertIn('[Test 2] Physical Memory Commit OK', stderr)
+        self.assertIn('[Test 2] Physical Memory Map   : ', stderr)
+        self.assertIn('[Test 2] Sender   After  Commit: Hello World, Alice', stderr)
+        self.assertIn('[Test 2] Sender   Before Map   : Alice, Hello World', stderr)
+        self.assertIn('[Test 2] Receiver After  Map   : Hello World, Bob', stderr)
+        self.assertIn('[Test 2] Sender   After  Map   : Alice, Hello World', stderr)
+
+        # Map and Commit File-Backed Physical Memory
+        self.assertIn('[Test 3] Physical Memory Commit OK', stderr)
+        self.assertIn('[Test 3] Physical Memory Map   : Hello World', stderr)
+        self.assertIn('[Test 3] Sender   After  Commit: Hello World', stderr)
+        self.assertIn('[Test 3] Receiver After  Map   : Hello World, Bob', stderr)
+        self.assertIn('[Test 3] Sender   After  Map   : Hello World', stderr)
+
+        # Map and Commit File-Backed Physical Memory Beyond File Size
+        self.assertIn('[Test 4] Physical Memory Commit OK', stderr)
+        self.assertIn('[Test 4] Physical Memory Map   : Memory Fault', stderr)
+
+        # Map and Commit Huge Physical Memory
+        self.assertIn('[Test 5] Physical Memory Commit OK', stderr)
+        self.assertIn('[Test 5] Physical Memory Map   : Hello World', stderr)
+
+class TC_23_SendHandle(RegressionTestCase):
+    def test_000_send_handle(self):
+        stdout, stderr = self.run_binary(['SendHandle'])
+        counter = collections.Counter(stderr.split('\n'))
+
+        # Send and Receive Handles across Processes
+        self.assertEqual(counter['Send Handle OK'], 3)
+        self.assertEqual(counter['Receive Handle OK'], 3)
+
+        # Send Pipe Handle
+        self.assertEqual(counter['Receive Pipe Handle: Hello World'], 1)
+
+        # Send Socket Handle
+        self.assertEqual(counter['Receive Socket Handle: Hello World'], 1)
+
+        # Send File Handle
+        self.assertEqual(counter['Receive File Handle: Hello World'], 1)
+
+
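+# Rerun the bootstrap and process-creation suites under the reference-monitor
+# loader (PAL_SEC); these are known to fail for now.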
+@unittest.expectedFailure
+class TC_30_Sandbox_Bootstrap(SandboxTestCase, TC_01_Bootstrap):
+    pass
+
+@unittest.expectedFailure
+class TC_31_Sandbox_Process(SandboxTestCase, TC_21_ProcessCreation):
+    pass
+
+@unittest.skipUnless(HAS_SGX, 'need SGX')
+class TC_40_AVXDisable(RegressionTestCase):
+    @unittest.expectedFailure
+    def test_000_avx_disable(self):
+        # Disable AVX bit in XFRM
+        stdout, stderr = self.run_binary(['AvxDisable'])
+        self.assertIn('Illegal instruction executed in enclave', stderr)

+ 54 - 81
Scripts/regression.py

@@ -1,92 +1,65 @@
-import sys, os, subprocess, re, time, signal
+import contextlib
+import os
+import pathlib
+import signal
+import subprocess
+import unittest
 
-class Result:
-    def __init__(self, out, log, code):
-        self.out = out.split('\n')
-        self.log = log.split('\n')
-        self.code = code
+HAS_SGX = os.environ.get('SGX_RUN') == '1'
 
-class Regression:
-    def __init__(self, loader = None, executable = '', prepare = None, timeout = 0):
-        self.loader = loader
-        self.executable = executable
-        self.prepare = prepare
-        self.runs = dict()
-        default_timeout = int(os.getenv('TIMEOUT', '10000'))
-        if default_timeout > timeout:
-            self.timeout = default_timeout
-        else:
-            self.timeout = timeout
-        self.keep_log = (os.getenv('KEEP_LOG', '0') == '1')
+def expectedFailureIf(predicate):
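+    # apply unittest.expectedFailure only when the predicate holds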
+    if predicate:
+        return unittest.expectedFailure
+    return lambda func: func
 
-    def add_check(self, name, check, times = 1, ignore_failure=0, args = []):
-        combined_args = ' '.join(args)
-        if not combined_args in self.runs:
-            self.runs[combined_args] = []
-        self.runs[combined_args].append((name, check, ignore_failure, times))
+class RegressionTestCase(unittest.TestCase):
+    LOADER_ENV = 'PAL_LOADER'
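+    # timeouts are in seconds; SGX runs get a longer default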
+    DEFAULT_TIMEOUT = (20 if HAS_SGX else 10)
 
-    def run_checks(self):
-        something_failed = 0
-        for combined_args in self.runs:
-            needed_times = 1
-            for (name, check, ignore_failure, times) in self.runs[combined_args]:
-                if needed_times < times:
-                    needed_times = times
+    def get_manifest(self, filename):
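+        # SGX runs use the .manifest.sgx variant of the manifest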
+        return filename + '.manifest' + ('.sgx' if HAS_SGX else '')
 
-            run_times = 0
-            outputs = []
-            timed_out = False
-            while run_times < needed_times:
-                args = []
-                if self.loader:
-                    args.append(self.loader)
-                if self.executable:
-                    args.append(self.executable)
-                if combined_args:
-                    args += combined_args.split(' ')
+    def run_binary(self, args, *, timeout=None, **kwds):
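+        # an explicit timeout can only lengthen the default, never shorten it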
+        timeout = (max(self.DEFAULT_TIMEOUT, timeout) if timeout is not None
+            else self.DEFAULT_TIMEOUT)
 
-                if self.prepare:
-                    self.prepare(args)
+        try:
+            loader = os.environ[self.LOADER_ENV]
+        except KeyError:
+            self.skipTest(
+                'environment variable {} unset'.format(self.LOADER_ENV))
 
-                p = subprocess.Popen(args,
-                                     stdout=subprocess.PIPE,
-                                     stderr=subprocess.PIPE,
-                                     preexec_fn=os.setpgrp)
-                try:
-                    out, log = p.communicate(timeout=self.timeout * 0.001)
-                except subprocess.TimeoutExpired:
-                    timed_out = True
-                    os.killpg(p.pid, signal.SIGKILL)
-                    out, log = p.communicate()
+        if not pathlib.Path(loader).exists():
+            self.skipTest('loader ({}) not found'.format(loader))
 
-                out = out.decode('utf-8')
-                log = log.decode('utf-8')
+        with subprocess.Popen([loader, *args],
+                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                preexec_fn=os.setpgrp,
+                **kwds) as process:
+            try:
+                stdout, stderr = process.communicate(timeout=timeout)
+            except subprocess.TimeoutExpired:
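+                # kill the whole process group; some PAL tests spawn children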
+                os.killpg(process.pid, signal.SIGKILL)
+                self.fail('timeout ({} s) expired'.format(timeout))
 
-                outputs.append(Result(out, log, p.returncode))
+            if process.returncode:
+                raise subprocess.CalledProcessError(
+                    process.returncode, args, stdout, stderr)
 
-                run_times = run_times + 1
-                keep_log = False
-                for (name, check, ignore_failure, times) in self.runs[combined_args]:
-                    if run_times == times:
-                        result = check(outputs)
-                        if not timed_out and result:
-                            print('\033[92m[Success       ]\033[0m', name)
-                        else:
-                            if ignore_failure:
-                                print('[Fail (Ignored)]', name)
-                            else:
-                                print('\033[93m[Fail          ]\033[0m', name)
-                                something_failed = 1
-                            if timed_out : print('Test timed out!')
-                            keep_log = True
+        return stdout.decode(), stderr.decode()
 
-                if self.keep_log and keep_log:
-                    sargs = [re.sub(r"\W", '_', a).strip('_') for a in args]
-                    filename = 'log-' + '_'.join(sargs) + '_' + time.strftime("%Y%m%d_%H%M%S")
-                    with open(filename, 'w') as f:
-                        f.write(log + out)
-                    print('keep log to %s' % (filename))
-        if something_failed:
-            return -1
-        else:
-            return 0
+    @contextlib.contextmanager
+    def expect_returncode(self, returncode):
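+        # the code under this context manager must fail with exactly this
+        # (nonzero) returncode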
+        if returncode == 0:
+            raise ValueError('expected returncode should be nonzero')
+        try:
+            yield
+            self.fail('did not fail (expected {})'.format(returncode))
+        except subprocess.CalledProcessError as e:
+            self.assertEqual(e.returncode, returncode,
+                'failed with returncode {} (expected {})'.format(
+                    e.returncode, returncode))
+
+
+class SandboxTestCase(RegressionTestCase):
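+    # same checks, but run under the reference-monitor loader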
+    LOADER_ENV = 'PAL_SEC'

Some files were not shown because too many files changed in this diff